/**
 * Parses and dispatches one interactive command line.
 *
 * <p>Blank lines and lines beginning with {@code #} are treated as no-ops and succeed trivially.
 *
 * @param command the raw line entered by the user
 * @return whether the command (if any) executed successfully
 */
private boolean processCommand(String command) {
  String line = command.trim();
  // Comments and empty input succeed without doing anything.
  if (line.isEmpty() || line.startsWith("#")) {
    return true;
  }
  _logger.debugf("Doing command: %s\n", line);
  // Tokenize on runs of whitespace and delegate to the word-array overload.
  return processCommand(line.split("\\s+"), null);
}
/**
 * Creates a temporary file (deleted on JVM exit) containing {@code content} plus a trailing
 * newline.
 *
 * @param filePrefix prefix for the temporary file's name; also used in the debug log message
 * @param content text to write to the file
 * @return path to the newly created temporary file
 * @throws BatfishException if the file cannot be created or written
 */
private Path createTempFile(String filePrefix, String content) {
  Path tempFilePath;
  try {
    tempFilePath = Files.createTempFile(filePrefix, null);
  } catch (IOException e) {
    throw new BatfishException("Failed to create temporary file", e);
  }
  File tempFile = tempFilePath.toFile();
  tempFile.deleteOnExit();
  _logger.debugf("Creating temporary %s file: %s\n", filePrefix, tempFilePath.toAbsolutePath());
  // Fix: use try-with-resources so the writer is closed even when write() throws; the original
  // leaked the file handle on the failure path because close() was only reached on success.
  try (FileWriter writer = new FileWriter(tempFile)) {
    writer.write(content + "\n");
  } catch (IOException e) {
    throw new BatfishException("Failed to write content to temporary file", e);
  }
  return tempFilePath;
}
/**
 * Deserializes a set of files in parallel into objects of the requested type, keyed by the
 * human-readable name associated with each file.
 *
 * @param namesByPath map from input file path to the logical name of the object it contains
 * @param outputClass class of the objects to deserialize
 * @return a name-sorted map from object name to deserialized object
 */
private <S extends Serializable> SortedMap<String, S> deserializeObjects(
    Map<Path, String> namesByPath, Class<S> outputClass) {
  String outputClassName = outputClass.getName();
  AtomicInteger completed =
      _newBatch.apply(
          String.format("Deserializing objects of type '%s' from files", outputClassName),
          namesByPath.size());
  // Deserialize in parallel, keyed by logical name, then copy into a TreeMap for sorted order.
  Map<String, S> byName =
      namesByPath.entrySet().parallelStream()
          .collect(
              Collectors.toMap(
                  Entry::getValue,
                  fileEntry -> {
                    Path inputPath = fileEntry.getKey();
                    String objectName = fileEntry.getValue();
                    _logger.debugf(
                        "Reading %s '%s' from '%s'\n", outputClassName, objectName, inputPath);
                    S deserialized = deserializeObject(inputPath, outputClass);
                    completed.incrementAndGet();
                    return deserialized;
                  }));
  return new TreeMap<>(byName);
}
/**
 * Loads the configurations for the current network snapshot.
 *
 * @return map from hostname to {@link Configuration}, sorted by hostname
 */
@Override
public SortedMap<String, Configuration> loadConfigurations() {
  // Resolve the active snapshot, then delegate to the snapshot-specific overload.
  NetworkSnapshot snapshot = getNetworkSnapshot();
  _logger.debugf("Loading configurations for %s\n", snapshot);
  return loadConfigurations(snapshot);
}
/**
 * Reads every non-hidden file directly under {@code directory} into memory.
 *
 * <p>Hidden files (names starting with '.') are skipped. Non-empty file contents have a newline
 * appended so downstream parsers always see a terminated final line.
 *
 * @param directory directory whose files should be read
 * @param description human-readable description used in log and batch messages
 * @return map from file path to file text, sorted by path
 */
private SortedMap<Path, String> readFiles(Path directory, String description) {
  _logger.infof("\n*** READING FILES: %s ***\n", description);
  _logger.resetTimer();
  List<Path> filePaths;
  // CommonUtil.list returns a stream holding a directory handle; close it promptly.
  try (Stream<Path> paths = CommonUtil.list(directory)) {
    filePaths =
        paths
            .filter(p -> !p.getFileName().toString().startsWith("."))
            .sorted()
            .collect(Collectors.toList());
  }
  AtomicInteger completed = newBatch("Reading files: " + description, filePaths.size());
  SortedMap<Path, String> fileData = new TreeMap<>();
  for (Path path : filePaths) {
    _logger.debugf("Reading: \"%s\"\n", path);
    String rawText = CommonUtil.readFile(path.toAbsolutePath());
    // Empty files stay empty; everything else gets a terminating newline.
    fileData.put(path, rawText.isEmpty() ? rawText : rawText + "\n");
    completed.incrementAndGet();
  }
  _logger.printElapsedTime();
  return fileData;
}
/**
 * Executes a SQL update, retrying up to {@code MAX_DB_TRIES} times and reopening the database
 * connection if it has gone stale between attempts.
 *
 * @param update the prepared update statement to execute
 * @return the update's row count, or 0 if all attempts failed or the connection could not be
 *     revalidated
 */
private synchronized int executeUpdate(PreparedStatement update) {
  int triesLeft = MAX_DB_TRIES;
  String updateString = update.toString();
  while (triesLeft > 0) {
    triesLeft--;
    try {
      _logger.debugf("Executing SQL update: %s\n", updateString);
      return update.executeUpdate();
    } catch (SQLException e) {
      // Fix: say "update" (this is executeUpdate, not a query) and terminate the line with \n
      // so the following "Tries left" message is not glued onto the same log line.
      _logger.errorf(
          "SQLException while executing update '%s': %s\n", updateString, e.getMessage());
      _logger.errorf("Tries left = %d\n", triesLeft);
      if (triesLeft > 0) {
        try {
          // Reconnect if the connection has gone stale before retrying.
          if (!_dbConn.isValid(DB_VALID_CHECK_TIMEOUT_SECS)) {
            openDbConnection();
          }
        } catch (SQLException e1) {
          // Connection cannot even be validated; give up on this update.
          return 0;
        }
      }
    }
  }
  return 0;
}
.map( e -> { logger.debugf( "Reading and unzipping: %s '%s' from %s%n", outputClassName, e.getValue(), e.getKey());
@Override public SortedMap<String, Configuration> loadConfigurations(NetworkSnapshot snapshot) { // Do we already have configurations in the cache? SortedMap<String, Configuration> configurations = _cachedConfigurations.getIfPresent(snapshot); if (configurations != null) { return configurations; } _logger.debugf("Loading configurations for %s, cache miss", snapshot); // Next, see if we have an up-to-date configurations on disk. configurations = _storage.loadConfigurations(snapshot.getNetwork(), snapshot.getSnapshot()); if (configurations != null) { _logger.debugf("Loaded configurations for %s off disk", snapshot); postProcessSnapshot(configurations); } else { // Otherwise, we have to parse the configurations. Fall back to old, hacky code. configurations = parseConfigurationsAndApplyEnvironment(); } _cachedConfigurations.put(snapshot, configurations); return configurations; }
/**
 * Reads all network configuration files of the given type from the test rig directory.
 *
 * <p>Non-empty file contents have a newline appended so downstream parsers always see a
 * terminated final line.
 *
 * @param testRigPath root directory of the test rig
 * @param configsType subdirectory name identifying the configuration type to read
 * @return map from file path to file text, sorted by path
 */
private SortedMap<Path, String> readConfigurationFiles(Path testRigPath, String configsType) {
  _logger.infof("\n*** READING %s FILES ***\n", configsType);
  _logger.resetTimer();
  Path configsPath = testRigPath.resolve(configsType);
  List<Path> configFilePaths = listAllFiles(configsPath);
  AtomicInteger completed =
      newBatch("Reading network configuration files", configFilePaths.size());
  SortedMap<Path, String> configurationData = new TreeMap<>();
  for (Path configFile : configFilePaths) {
    _logger.debugf("Reading: \"%s\"\n", configFile);
    String rawText = CommonUtil.readFile(configFile.toAbsolutePath());
    // Empty files stay empty; everything else gets a terminating newline.
    configurationData.put(configFile, rawText.isEmpty() ? rawText : rawText + "\n");
    completed.incrementAndGet();
  }
  _logger.printElapsedTime();
  return configurationData;
}
/**
 * Executes a SQL query, retrying up to {@code MAX_DB_TRIES} times and reopening the database
 * connection if it has gone stale between attempts.
 *
 * @param query the prepared query to execute
 * @return the query's result set, or {@code null} if all attempts failed or the connection
 *     could not be revalidated
 * @throws BatfishException if a non-SQL exception occurs during execution
 */
@Nullable
private synchronized ResultSet executeQuery(PreparedStatement query) {
  int triesLeft = MAX_DB_TRIES;
  while (triesLeft > 0) {
    triesLeft--;
    try {
      _logger.debugf("Executing SQL query: %s\n", query);
      return query.executeQuery();
    } catch (SQLException e) {
      // Fix: terminate the line with \n so the following "Tries left" message is not glued
      // onto the same log line (matches the sibling errorf call).
      _logger.errorf("SQLException while executing query '%s': %s\n", query, e.getMessage());
      _logger.errorf("Tries left = %d\n", triesLeft);
      if (triesLeft > 0) {
        try {
          // Reconnect if the connection has gone stale before retrying.
          if (!_dbConn.isValid(DB_VALID_CHECK_TIMEOUT_SECS)) {
            openDbConnection();
          }
        } catch (SQLException e1) {
          // Connection cannot even be validated; give up on this query.
          return null;
        }
      }
    } catch (Exception e) {
      // Fix: preserve the cause. The original dropped 'e', discarding the stack trace and
      // making the wrapped exception impossible to diagnose.
      throw new BatfishException("Non-SQL-related exception occurred when executing query", e);
    }
  }
  return null;
}
List<String> args = new ArrayList<>(Arrays.asList(initialArgArray)); final String[] argArray = args.toArray(new String[] {}); _logger.debugf("Starting batfish worker with args: %s\n", Arrays.toString(argArray)); Thread thread = new Thread("batfishThread") {
String flatConfigText = e.getValue(); String outputFileAsString = outputFile.toString(); _logger.debugf("Writing config to \"%s\"...", outputFileAsString); CommonUtil.writeFile(outputFile, flatConfigText); _logger.debug("OK\n");
SortedMap<String, Configuration> loadCompressedConfigurations(NetworkSnapshot snapshot) { // Do we already have configurations in the cache? SortedMap<String, Configuration> configurations = _cachedCompressedConfigurations.getIfPresent(snapshot); if (configurations != null) { return configurations; } _logger.debugf("Loading configurations for %s, cache miss", snapshot); // Next, see if we have an up-to-date configurations on disk. configurations = _storage.loadCompressedConfigurations(_settings.getContainer(), snapshot.getSnapshot()); if (configurations != null) { return configurations; } else { computeCompressedDataPlane(); configurations = _cachedCompressedConfigurations.getIfPresent(snapshot); if (configurations == null) { throw new BatfishException("Could not compute compressed configs"); } return configurations; } }
.post(Entity.entity(multiPart, multiPart.getMediaType())); _logger.debugf("%s %s %s\n", response.getStatus(), response.getStatusInfo(), response);
private void runInteractive() { SignalHandler handler = signal -> _logger.debugf("Client: Ignoring signal: %s\n", signal); Signal.handle(new Signal("INT"), handler); try { while (!_exit) { try { String rawLine = _reader.readLine("batfish> "); if (rawLine == null) { break; } processCommand(rawLine); } catch (UserInterruptException e) { continue; } } } catch (EndOfFileException e) { // ignored } catch (Throwable t) { t.printStackTrace(); } finally { try { _reader.getHistory().save(); } catch (IOException e) { e.printStackTrace(); } } }
private void serializeAwsConfigs( Path testRigPath, Path outputPath, ParseVendorConfigurationAnswerElement pvcae) { Map<Path, String> configurationData = readConfigurationFiles(testRigPath, BfConsts.RELPATH_AWS_CONFIGS_DIR); AwsConfiguration config; try (ActiveSpan parseAwsConfigsSpan = GlobalTracer.get().buildSpan("Parse AWS configs").startActive()) { assert parseAwsConfigsSpan != null; // avoid unused warning config = parseAwsConfigurations(configurationData, pvcae); } _logger.info("\n*** SERIALIZING AWS CONFIGURATION STRUCTURES ***\n"); _logger.resetTimer(); outputPath.toFile().mkdirs(); Path currentOutputPath = outputPath.resolve(BfConsts.RELPATH_AWS_CONFIGS_FILE); _logger.debugf("Serializing AWS to \"%s\"...", currentOutputPath); serializeObject(config, currentOutputPath); _logger.debug("OK\n"); _logger.printElapsedTime(); }
_logger.outputf(".... %s\n", batches.get(i)); } else { _logger.debugf(".... %s\n", batches.get(i));