/**
 * Removes the persisted mapping entry for the given job from the distributed state store.
 *
 * @param jobName name of the job whose mapping entry is deleted
 * @throws IOException if the underlying state store fails to delete the entry
 */
public void deleteMapping(String jobName) throws IOException {
  this.stateStore.delete(this.distributedStateStoreName, jobName);
}
// Fragment of an anonymous Callable (the trailing `}, "Deserialize state for " + ...` closes an
// enclosing helper call not visible here). The call() body: fetches the first persisted TaskState
// stored under taskStateName in the output task-state directory, enqueues it for the collector,
// then deletes the persisted entry so it is not re-read.
// NOTE(review): assumes getAll(...) returns at least one element for taskStateName — confirm in caller.
@Override public Void call() throws Exception { TaskState taskState = taskStateStore.getAll(outputTaskStateDir.getName(), taskStateName).get(0); taskStateQueue.add(taskState); taskStateStore.delete(outputTaskStateDir.getName(), taskStateName); return null; } }, "Deserialize state for " + taskStateName);
/**
 * Deletes an entire named store from the dataset state store.
 *
 * @param storeName name of the store to remove
 * @throws IOException if the underlying state store fails to delete the store
 */
public void deleteStateStore(String storeName) throws IOException {
  this.datasetStateStore.delete(storeName);
}
/**
 * Removes the dataset state persisted under {@code JOB_NAME} once every test in this class has run,
 * so later runs start from a clean store.
 *
 * @throws IOException if the state store fails to delete the entry
 */
@AfterClass
public void tearDown() throws IOException {
  this.datasetStateStore.delete(JOB_NAME);
}
/** * Delete persisted {@link WorkUnit}s and {@link JobState} upon job completion. */ private void cleanupWorkingDirectory() throws IOException { LOGGER.info("Deleting persisted work units for job " + this.jobContext.getJobId()); stateStores.getWuStateStore().delete(this.jobContext.getJobId()); // delete the directory that stores the task state files stateStores.getTaskStateStore().delete(outputTaskStateDir.getName()); LOGGER.info("Deleting job state file for job " + this.jobContext.getJobId()); if (this.stateStores.haveJobStateStore()) { this.stateStores.getJobStateStore().delete(this.jobContext.getJobId()); } else { Path jobStateFilePath = GobblinClusterUtils.getJobStateFilePath(false, this.appWorkDir, this.jobContext.getJobId()); this.fs.delete(jobStateFilePath, false); } }
/**
 * Clears the job state and dataset state persisted under {@code TEST_JOB_NAME} after the test
 * class completes.
 *
 * @throws IOException if either state store fails to delete its entries
 */
@AfterClass
public void tearDown() throws IOException {
  dbJobStateStore.delete(TEST_JOB_NAME);
  dbDatasetStateStore.delete(TEST_JOB_NAME);
}
}
@BeforeClass public void setUp() throws Exception { Properties properties = new Properties(); try (FileReader fr = new FileReader("gobblin-test/resource/gobblin.test.properties")) { properties.load(fr); } this.datasetStateStore = new FsStateStore<>( properties.getProperty(ConfigurationKeys.STATE_STORE_FS_URI_KEY, ConfigurationKeys.LOCAL_FS_URI), properties.getProperty(ConfigurationKeys.STATE_STORE_ROOT_DIR_KEY), JobState.DatasetState.class); // clear data that might be there from a prior run this.datasetStateStore.delete(JOB_NAME); this.jobConfig.putAll(properties); this.jobConfig.setProperty(ConfigurationKeys.JOB_NAME_KEY, JOB_NAME); this.jobConfig.setProperty(ConfigurationKeys.SOURCE_CLASS_KEY, DummySource.class.getName()); this.jobConfig.setProperty(ConfigurationKeys.WRITER_BUILDER_CLASS, DummyDataWriterBuilder.class.getName()); }
/**
 * Verifies that deleting a job's entry from the ZooKeeper-backed job state store removes the
 * persisted {@link JobState}: the state is present before the delete and absent afterwards.
 *
 * @throws IOException if a state store operation fails
 */
@Test(dependsOnMethods = "testGetPreviousDatasetStatesByUrns")
public void testDeleteJobState() throws IOException {
  String tableName = zkDatasetStateStore.CURRENT_DATASET_STATE_FILE_SUFFIX
      + zkDatasetStateStore.DATASET_STATE_STORE_TABLE_SUFFIX;

  // The entry must exist prior to deletion for the test to be meaningful.
  JobState jobState = zkJobStateStore.get(TEST_JOB_NAME, tableName, TEST_JOB_ID);
  Assert.assertNotNull(jobState);
  Assert.assertEquals(jobState.getJobId(), TEST_JOB_ID);

  zkJobStateStore.delete(TEST_JOB_NAME);

  // After deletion the same lookup should find nothing.
  jobState = zkJobStateStore.get(TEST_JOB_NAME, tableName, TEST_JOB_ID);
  Assert.assertNull(jobState);
}
/**
 * Verifies that deleting a job's entry from the database-backed job state store removes the
 * persisted {@link JobState}: the state is present before the delete and absent afterwards.
 *
 * @throws IOException if a state store operation fails
 */
@Test(dependsOnMethods = "testGetPreviousDatasetStatesByUrns")
public void testDeleteJobState() throws IOException {
  String tableName = dbDatasetStateStore.CURRENT_DATASET_STATE_FILE_SUFFIX
      + dbDatasetStateStore.DATASET_STATE_STORE_TABLE_SUFFIX;

  // The entry must exist prior to deletion for the test to be meaningful.
  JobState jobState = dbJobStateStore.get(TEST_JOB_NAME, tableName, TEST_JOB_ID);
  Assert.assertNotNull(jobState);
  Assert.assertEquals(jobState.getJobId(), TEST_JOB_ID);

  dbJobStateStore.delete(TEST_JOB_NAME);

  // After deletion the same lookup should find nothing.
  jobState = dbJobStateStore.get(TEST_JOB_NAME, tableName, TEST_JOB_ID);
  Assert.assertNull(jobState);
}
// Remove this task's persisted state table from the job's task state store.
// NOTE(review): statement fragment — assumes the store is keyed by jobId and the table name is
// taskId + TASK_STATE_STORE_TABLE_SUFFIX; confirm against the enclosing method.
taskStateStore.delete(jobId, taskId + AbstractJobLauncher.TASK_STATE_STORE_TABLE_SUFFIX);
@BeforeClass public void setUp() throws Exception { ClassAliasResolver<StateStore.Factory> resolver = new ClassAliasResolver<>(StateStore.Factory.class); stateStoreFactory = resolver.resolveClass("fs").newInstance(); config = ConfigFactory.empty().withValue(ConfigurationKeys.STATE_STORE_FS_URI_KEY, ConfigValueFactory.fromAnyRef("file:///")).withValue(ConfigurationKeys.STATE_STORE_ROOT_DIR_KEY, ConfigValueFactory.fromAnyRef("metastore-test")).withValue("fs.permissions.umask-mode", ConfigValueFactory.fromAnyRef("022")); this.stateStore = stateStoreFactory.createStateStore(config, State.class); // cleanup in case files left behind by a prior run this.stateStore.delete("testStore"); this.stateStore.delete("testStore2"); }
@BeforeClass public void setUp() throws Exception { ConfigBuilder configBuilder = ConfigBuilder.create(); testingServer = new TestingServer(-1); zkJobStateStore = new ZkStateStore<>(testingServer.getConnectString(), "/STATE_STORE/TEST", false, JobState.class); configBuilder.addPrimitive(ZkStateStoreConfigurationKeys.STATE_STORE_ZK_CONNECT_STRING_KEY, testingServer.getConnectString()); configBuilder.addPrimitive(ConfigurationKeys.STATE_STORE_ROOT_DIR_KEY, "/STATE_STORE/TEST2"); ClassAliasResolver<DatasetStateStore.Factory> resolver = new ClassAliasResolver<>(DatasetStateStore.Factory.class); DatasetStateStore.Factory stateStoreFactory = resolver.resolveClass("zk").newInstance(); zkDatasetStateStore = stateStoreFactory.createStateStore(configBuilder.build()); // clear data that may have been left behind by a prior test run zkJobStateStore.delete(TEST_JOB_NAME); zkDatasetStateStore.delete(TEST_JOB_NAME); zkJobStateStore.delete(TEST_JOB_NAME2); zkDatasetStateStore.delete(TEST_JOB_NAME2); }
@BeforeClass public void setUp() throws Exception { testMetastoreDatabase = TestMetastoreDatabaseFactory.get(); String jdbcUrl = testMetastoreDatabase.getJdbcUrl(); ConfigBuilder configBuilder = ConfigBuilder.create(); BasicDataSource mySqlDs = new BasicDataSource(); mySqlDs.setDriverClassName(ConfigurationKeys.DEFAULT_STATE_STORE_DB_JDBC_DRIVER); mySqlDs.setDefaultAutoCommit(false); mySqlDs.setUrl(jdbcUrl); mySqlDs.setUsername(TEST_USER); mySqlDs.setPassword(TEST_PASSWORD); dbJobStateStore = new MysqlStateStore<>(mySqlDs, TEST_STATE_STORE, false, JobState.class); configBuilder.addPrimitive(ConfigurationKeys.STATE_STORE_DB_URL_KEY, jdbcUrl); configBuilder.addPrimitive(ConfigurationKeys.STATE_STORE_DB_USER_KEY, TEST_USER); configBuilder.addPrimitive(ConfigurationKeys.STATE_STORE_DB_PASSWORD_KEY, TEST_PASSWORD); ClassAliasResolver<DatasetStateStore.Factory> resolver = new ClassAliasResolver<>(DatasetStateStore.Factory.class); DatasetStateStore.Factory stateStoreFactory = resolver.resolveClass("mysql").newInstance(); dbDatasetStateStore = stateStoreFactory.createStateStore(configBuilder.build()); // clear data that may have been left behind by a prior test run dbJobStateStore.delete(TEST_JOB_NAME); dbDatasetStateStore.delete(TEST_JOB_NAME); dbJobStateStore.delete(TEST_JOB_NAME2); dbDatasetStateStore.delete(TEST_JOB_NAME2); }
// Fragment of an anonymous Callable (the trailing `}, "Deserialize state for " + ...` closes an
// enclosing helper call not visible here). The call() body: fetches the first persisted TaskState
// stored under taskStateName in the output task-state directory, enqueues it for the collector,
// then deletes the persisted entry so it is not re-read.
// NOTE(review): assumes getAll(...) returns at least one element for taskStateName — confirm in caller.
@Override public Void call() throws Exception { TaskState taskState = taskStateStore.getAll(outputTaskStateDir.getName(), taskStateName).get(0); taskStateQueue.add(taskState); taskStateStore.delete(outputTaskStateDir.getName(), taskStateName); return null; } }, "Deserialize state for " + taskStateName);
/** * Delete persisted {@link WorkUnit}s and {@link JobState} upon job completion. */ private void cleanupWorkingDirectory() throws IOException { LOGGER.info("Deleting persisted work units for job " + this.jobContext.getJobId()); stateStores.getWuStateStore().delete(this.jobContext.getJobId()); // delete the directory that stores the task state files stateStores.getTaskStateStore().delete(outputTaskStateDir.getName()); LOGGER.info("Deleting job state file for job " + this.jobContext.getJobId()); if (this.stateStores.haveJobStateStore()) { this.stateStores.getJobStateStore().delete(this.jobContext.getJobId()); } else { Path jobStateFilePath = GobblinClusterUtils.getJobStateFilePath(false, this.appWorkDir, this.jobContext.getJobId()); this.fs.delete(jobStateFilePath, false); } }
// Remove this task's persisted state table from the job's task state store.
// NOTE(review): statement fragment — assumes the store is keyed by jobId and the table name is
// taskId + TASK_STATE_STORE_TABLE_SUFFIX; confirm against the enclosing method.
taskStateStore.delete(jobId, taskId + AbstractJobLauncher.TASK_STATE_STORE_TABLE_SUFFIX);