Refine search
/** * Write the last processed commit githash to the checkpoint file * @param gitHash * @throws IOException */ private void writeCheckpoint(String gitHash) throws IOException { // write to a temporary name then rename to make the operation atomic when the file system allows a file to be // replaced File tmpCheckpointFile = new File(this.repoDir, CHECKPOINT_FILE_TMP); File checkpointFile = new File(this.repoDir, CHECKPOINT_FILE); Files.write(gitHash, tmpCheckpointFile, Charsets.UTF_8); Files.move(tmpCheckpointFile, checkpointFile); }
/**
 * Saves the contents of {@code in} to a file named {@code fileName} under
 * {@code parentPath}, creating the directory if it does not exist.
 *
 * <p>Fixes: the previous version created a Guava {@code Closer} that never
 * registered any resource (its {@code finally} block, including the pointless
 * {@code closer = null}, was dead weight), and it buffered the entire stream in
 * memory via {@code ByteStreams.toByteArray} before writing. The stream is now
 * copied directly to disk. The caller remains responsible for closing {@code in}.
 *
 * @param parentPath directory in which to create the file
 * @param fileName   name of the file to write
 * @param in         stream providing the file contents; not closed by this method
 * @return absolute path of the written file, or {@code null} on any failure
 */
public static String download(String parentPath, String fileName, InputStream in) {
    try {
        File imageDir = new File(parentPath);
        if (!imageDir.exists()) {
            imageDir.mkdirs();
        }
        File imageFile = new File(imageDir, fileName);
        // Stream straight to disk; REPLACE_EXISTING preserves the old overwrite semantics.
        java.nio.file.Files.copy(in, imageFile.toPath(),
            java.nio.file.StandardCopyOption.REPLACE_EXISTING);
        return imageFile.getAbsolutePath();
    } catch (Exception ex) {
        // Contract kept from the original: report the failure and signal it with null.
        ex.printStackTrace();
        return null;
    }
}
File imageDir = new File(parentPath); if(!imageDir.exists()) { imageDir.mkdirs(); File imageFile = new File(imageDir, fileName); InputStream in = closer.register(new URL(imgUrl).openStream()); Files.write(ByteStreams.toByteArray(in), imageFile); return imageFile.getAbsolutePath(); } catch(Exception ex) {
/** * {@inheritDoc} */ @Override public synchronized void writeCheckpoint(Dag<JobExecutionPlan> dag) throws IOException { // write to a temporary name then rename to make the operation atomic when the file system allows a file to be // replaced String fileName = DagManagerUtils.generateDagId(dag) + DAG_FILE_EXTENSION; String serializedDag = serializeDag(dag); File tmpCheckpointFile = new File(this.dagCheckpointDir, fileName + ".tmp"); File checkpointFile = new File(this.dagCheckpointDir, fileName); Files.write(serializedDag, tmpCheckpointFile, Charsets.UTF_8); Files.move(tmpCheckpointFile, checkpointFile); }
/**
 * Writes the schedule-offset map to the checkpoint file as serialized bytes.
 *
 * <p>An empty serialization is treated as "nothing to persist" and skipped;
 * a failed write is logged and rethrown as a {@link RuntimeException}.
 *
 * @param offsets mapping persisted as the schedule offset checkpoint
 */
public void saveScheduleOffsetCheckpoint(Map<Long, Long> offsets) {
    ensureDir(config.getScheduleOffsetCheckpointPath());
    final byte[] serialized = SERDE.toBytes(offsets);
    Preconditions.checkState(serialized != null, "Serialized checkpoint data should not be null.");
    if (serialized.length == 0) {
        // Nothing useful to write; leave any existing checkpoint untouched.
        return;
    }
    final File checkpointFile = new File(config.getScheduleOffsetCheckpointPath(), SCHEDULE_OFFSET_CHECKPOINT);
    try {
        Files.write(serialized, checkpointFile);
    } catch (IOException e) {
        LOGGER.error("write data into schedule checkpoint file failed. file={}", checkpointFile, e);
        throw new RuntimeException("write checkpoint data failed.", e);
    }
}
/**
 * Starts the ZooKeeper server instance as an external process.
 *
 * <p>Before launching, writes any configured {@code java.env} script and
 * {@code log4j.properties} from the config manager into the instance's config
 * directory so the start script can pick them up, then hands the spawned
 * process to the process monitor and logs the launch command.
 *
 * @throws Exception if config preparation or process start fails
 */
@Override
public void startInstance() throws Exception {
    Details details = new Details(exhibitor);
    String javaEnvironmentScript = exhibitor.getConfigManager().getConfig().getString(StringConfigs.JAVA_ENVIRONMENT);
    String log4jProperties = exhibitor.getConfigManager().getConfig().getString(StringConfigs.LOG4J_PROPERTIES);
    prepConfigFile(details);
    // Only materialize java.env when a non-blank script is configured.
    if ( (javaEnvironmentScript != null) && (javaEnvironmentScript.trim().length() > 0) ) {
        File envFile = new File(details.configDirectory, "java.env");
        Files.write(javaEnvironmentScript, envFile, Charset.defaultCharset());
    }
    // Same for log4j.properties: written only when configured and non-blank.
    if ( (log4jProperties != null) && (log4jProperties.trim().length() > 0) ) {
        File log4jFile = new File(details.configDirectory, "log4j.properties");
        Files.write(log4jProperties, log4jFile, Charset.defaultCharset());
    }
    ProcessBuilder builder = buildZkServerScript("start");
    // Per the mode name, the monitor leaves ZooKeeper running if this JVM is interrupted.
    exhibitor.getProcessMonitor().monitor(ProcessTypes.ZOOKEEPER, builder.start(), null, ProcessMonitor.Mode.LEAVE_RUNNING_ON_INTERRUPT, ProcessMonitor.Streams.BOTH);
    exhibitor.getLog().add(ActivityLog.Type.INFO, "Process started via: " + builder.command().get(0));
}
File scriptOut = new File(outputDirectory, Integer.toString(IndexType.CLIENTSCRIPT.getNumber())); scriptOut.mkdirs(); byte[] packedScript = saver.save(script); File targetFile = new File(scriptOut, Integer.toString(script.getId())); Files.write(packedScript, targetFile); File hashFile = new File(scriptDirectory, Files.getNameWithoutExtension(scriptFile.getName()) + ".hash"); if (hashFile.exists())
@Test public void wholeTextFiles() throws Exception { byte[] content1 = "spark is easy to use.\n".getBytes(StandardCharsets.UTF_8); byte[] content2 = "spark is also easy to use.\n".getBytes(StandardCharsets.UTF_8); String tempDirName = tempDir.getAbsolutePath(); String path1 = new Path(tempDirName, "part-00000").toUri().getPath(); String path2 = new Path(tempDirName, "part-00001").toUri().getPath(); Files.write(content1, new File(path1)); Files.write(content2, new File(path2)); Map<String, String> container = new HashMap<>(); container.put(path1, new Text(content1).toString()); container.put(path2, new Text(content2).toString()); JavaPairRDD<String, String> readRDD = sc.wholeTextFiles(tempDirName, 3); List<Tuple2<String, String>> result = readRDD.collect(); for (Tuple2<String, String> res : result) { // Note that the paths from `wholeTextFiles` are in URI format on Windows, // for example, file:/C:/a/b/c. assertEquals(res._2(), container.get(new Path(res._1()).toUri().getPath())); } }
@Test public void wholeTextFiles() throws Exception { byte[] content1 = "spark is easy to use.\n".getBytes(StandardCharsets.UTF_8); byte[] content2 = "spark is also easy to use.\n".getBytes(StandardCharsets.UTF_8); String tempDirName = tempDir.getAbsolutePath(); String path1 = new Path(tempDirName, "part-00000").toUri().getPath(); String path2 = new Path(tempDirName, "part-00001").toUri().getPath(); Files.write(content1, new File(path1)); Files.write(content2, new File(path2)); Map<String, String> container = new HashMap<>(); container.put(path1, new Text(content1).toString()); container.put(path2, new Text(content2).toString()); JavaPairRDD<String, String> readRDD = sc.wholeTextFiles(tempDirName, 3); List<Tuple2<String, String>> result = readRDD.collect(); for (Tuple2<String, String> res : result) { // Note that the paths from `wholeTextFiles` are in URI format on Windows, // for example, file:/C:/a/b/c. assertEquals(res._2(), container.get(new Path(res._1()).toUri().getPath())); } }
@Test public void wholeTextFiles() throws Exception { byte[] content1 = "spark is easy to use.\n".getBytes(StandardCharsets.UTF_8); byte[] content2 = "spark is also easy to use.\n".getBytes(StandardCharsets.UTF_8); String tempDirName = tempDir.getAbsolutePath(); String path1 = new Path(tempDirName, "part-00000").toUri().getPath(); String path2 = new Path(tempDirName, "part-00001").toUri().getPath(); Files.write(content1, new File(path1)); Files.write(content2, new File(path2)); Map<String, String> container = new HashMap<>(); container.put(path1, new Text(content1).toString()); container.put(path2, new Text(content2).toString()); JavaPairRDD<String, String> readRDD = sc.wholeTextFiles(tempDirName, 3); List<Tuple2<String, String>> result = readRDD.collect(); for (Tuple2<String, String> res : result) { // Note that the paths from `wholeTextFiles` are in URI format on Windows, // for example, file:/C:/a/b/c. assertEquals(res._2(), container.get(new Path(res._1()).toUri().getPath())); } }
/**
 * Round-trips a task log through {@code FileTaskLogs}: pushes a 4-byte log file
 * and streams it back at several offsets, checking the returned substring.
 * Per the expected map, a non-negative offset reads from that position and a
 * negative offset reads the last {@code -offset} bytes (clamped to file size,
 * hence -5 on the 4-byte file yields the whole content).
 *
 * <p>Fix: {@code Assert.assertEquals(message, expected, actual)} takes the
 * expected value second; the arguments were reversed ({@code string} — the
 * actual value — was passed as expected), which produces misleading failure
 * messages. The expected value from the map is now passed first.
 */
@Test
public void testSimple() throws Exception {
    final File tmpDir = temporaryFolder.newFolder();
    try {
        final File logDir = new File(tmpDir, "druid/logs");
        final File logFile = new File(tmpDir, "log");
        Files.write("blah", logFile, StandardCharsets.UTF_8);
        final TaskLogs taskLogs = new FileTaskLogs(new FileTaskLogsConfig(logDir));
        taskLogs.pushTaskLog("foo", logFile);
        // offset -> expected slice of "blah" (negative offsets read from the end).
        final Map<Long, String> expected = ImmutableMap.of(0L, "blah", 1L, "lah", -2L, "ah", -5L, "blah");
        for (Map.Entry<Long, String> entry : expected.entrySet()) {
            final byte[] bytes = ByteStreams.toByteArray(taskLogs.streamTaskLog("foo", entry.getKey()).get().openStream());
            final String string = StringUtils.fromUtf8(bytes);
            Assert.assertEquals(StringUtils.format("Read with offset %,d", entry.getKey()), entry.getValue(), string);
        }
    } finally {
        FileUtils.deleteDirectory(tmpDir);
    }
}
/**
 * Verifies that {@code pushTaskLog} fails with a descriptive {@code IOException}
 * when the log directory cannot be created (parent made read-only).
 *
 * <p>Fix: the temporary directory was made read-only and never restored, which
 * can prevent the {@code temporaryFolder} rule from deleting it during cleanup
 * and leak the directory. Writability is now restored in a {@code finally}
 * block; the expected exception still propagates through it to the rule.
 */
@Test
public void testPushTaskLogDirCreationFails() throws Exception {
    final File tmpDir = temporaryFolder.newFolder();
    final File logDir = new File(tmpDir, "druid/logs");
    final File logFile = new File(tmpDir, "log");
    Files.write("blah", logFile, StandardCharsets.UTF_8);
    if (!tmpDir.setWritable(false)) {
        throw new RuntimeException("failed to make tmp dir read-only");
    }
    try {
        final TaskLogs taskLogs = new FileTaskLogs(new FileTaskLogsConfig(logDir));
        expectedException.expect(IOException.class);
        expectedException.expectMessage("Unable to create task log dir");
        taskLogs.pushTaskLog("foo", logFile);
    } finally {
        // Restore writability so the TemporaryFolder rule can clean up tmpDir.
        tmpDir.setWritable(true);
    }
}
/**
 * One-time setup: creates the input text file consumed by the tests and
 * populates the {@code SourceState} with extract metadata and the input path.
 *
 * @throws IOException if the parent directory or the text file cannot be created
 */
@BeforeClass
public void setUp() throws IOException {
    File testFile = new File(getFileDir(), "test.txt");
    File parentDir = testFile.getParentFile();
    if (!parentDir.exists() && !parentDir.mkdir()) {
        throw new IOException("Failed to create directory: " + parentDir);
    }
    if (!testFile.createNewFile()) {
        throw new IOException("Failed to create text file: " + testFile);
    }
    Files.write(TEXT, testFile, ConfigurationKeys.DEFAULT_CHARSET_ENCODING);
    this.sourceState = new SourceState();
    this.sourceState.setProp(ConfigurationKeys.EXTRACT_TABLE_TYPE_KEY, Extract.TableType.SNAPSHOT_ONLY.toString());
    this.sourceState.setProp(ConfigurationKeys.EXTRACT_NAMESPACE_NAME_KEY, "test");
    this.sourceState.setProp(ConfigurationKeys.EXTRACT_TABLE_NAME_KEY, "test");
    this.sourceState.setProp(HadoopFileInputSource.FILE_INPUT_PATHS_KEY, testFile.getAbsolutePath());
}
/**
 * Loads every struct definition from the cache store and dumps each one to a
 * JSON file named after its id, then logs how many were written.
 */
@Test
public void test() throws IOException {
    File dumpDir = folder.newFolder();
    int dumped = 0;
    try (Store store = new Store(StoreLocation.LOCATION)) {
        store.load();
        StructManager structManager = new StructManager(store);
        structManager.load();
        for (Map.Entry<Integer, StructDefinition> entry : structManager.getStructs().entrySet()) {
            // One JSON file per struct, keyed by its integer id.
            File outFile = new File(dumpDir, entry.getKey() + ".json");
            Files.write(gson.toJson(entry.getValue()), outFile, Charset.defaultCharset());
            dumped++;
        }
    }
    logger.info("Dumped {} structs to {}", dumped, dumpDir);
}
}
/**
 * Verifies that {@code SOURCE_FILEBASED_MAX_FILES_PER_RUN=2} caps each run at
 * two input files: the first run pulls file1+file2 (records 1-5), and a second
 * run — resuming via the enabled state store — pulls file3 (records 6-7).
 *
 * <p>NOTE(review): the test is disabled ({@code enabled=false}); the reason is
 * not visible here — confirm whether it is flaky or environment-dependent
 * before re-enabling.
 */
@Test (enabled=false)
public void testFileLimit() throws Exception {
    File stateStoreDir = Files.createTempDir();
    stateStoreDir.deleteOnExit();
    File dataDir = Files.createTempDir();
    dataDir.deleteOnExit();
    // Unique event bus id so the asserter only sees this test's records.
    String eventBusId = UUID.randomUUID().toString();
    TestingEventBusAsserter asserter = new TestingEventBusAsserter(eventBusId);
    EmbeddedGobblin gobblin = new EmbeddedGobblin().setTemplate("resource:///templates/textFileBasedSourceTest.template")
        .setConfiguration(ConfigurationKeys.SOURCE_FILEBASED_DATA_DIRECTORY, dataDir.getAbsolutePath())
        .setConfiguration(ConfigurationKeys.STATE_STORE_ROOT_DIR_KEY, stateStoreDir.getAbsolutePath())
        .setConfiguration(GobblinTestEventBusWriter.FULL_EVENTBUSID_KEY, eventBusId)
        .setConfiguration(ConfigurationKeys.STATE_STORE_ENABLED, "true")
        .setConfiguration(ConfigurationKeys.SOURCE_FILEBASED_MAX_FILES_PER_RUN, "2");
    // Three input files; the per-run limit of 2 should split them across two runs.
    Files.write("record1\nrecord2\nrecord3", new File(dataDir, "file1"), Charsets.UTF_8);
    Files.write("record4\nrecord5", new File(dataDir, "file2"), Charsets.UTF_8);
    Files.write("record6\nrecord7", new File(dataDir, "file3"), Charsets.UTF_8);
    gobblin.run();
    // should only pull first 2 files
    Set<Object> events = asserter.getEvents().stream().map(TestingEventBuses.Event::getValue).collect(Collectors.toSet());
    Assert.assertEquals(events, Sets.newHashSet("record1", "record2", "record3", "record4", "record5"));
    asserter.clear();
    gobblin.run();
    // Second run resumes from the persisted state and pulls the remaining file.
    events = asserter.getEvents().stream().map(TestingEventBuses.Event::getValue).collect(Collectors.toSet());
    Assert.assertEquals(events, Sets.newHashSet("record6", "record7"));
    asserter.clear();
}
/**
 * Verifies {@code CREATE TABLE ... WITH (external_location = ..., format = 'TEXTFILE')}:
 * the new table reads the pre-existing data file, {@code SHOW CREATE TABLE}
 * round-trips the original DDL, and {@code DROP TABLE} leaves the external
 * file untouched.
 */
@Test
public void testCreateExternalTable() throws Exception {
    File tempDir = createTempDir();
    File dataFile = new File(tempDir, "test.txt");
    Files.write("hello\nworld\n", dataFile, UTF_8);
    @Language("SQL") String createTableSql = format("" +
        "CREATE TABLE %s.%s.test_create_external (\n" +
        " name varchar\n" +
        ")\n" +
        "WITH (\n" +
        " external_location = '%s',\n" +
        " format = 'TEXTFILE'\n" +
        ")",
        getSession().getCatalog().get(),
        getSession().getSchema().get(),
        new Path(tempDir.toURI().toASCIIString()).toString());
    assertUpdate(createTableSql);
    // SHOW CREATE TABLE must reproduce the DDL exactly.
    MaterializedResult actual = computeActual("SHOW CREATE TABLE test_create_external");
    assertEquals(actual.getOnlyValue(), createTableSql);
    actual = computeActual("SELECT name FROM test_create_external");
    assertEquals(actual.getOnlyColumnAsSet(), ImmutableSet.of("hello", "world"));
    assertUpdate("DROP TABLE test_create_external");
    // file should still exist after drop
    assertFile(dataFile);
    deleteRecursively(tempDir.toPath(), ALLOW_INSECURE);
}