/** * remove a {@link FlowSpec} for a deleted or renamed flow config * @param change */ @Override public void removeChange(DiffEntry change) { if (checkConfigFilePath(change.getOldPath())) { Path configFilePath = new Path(this.repositoryDir, change.getOldPath()); String flowName = FSSpecStore.getSpecName(configFilePath); String flowGroup = FSSpecStore.getSpecGroup(configFilePath); // build a dummy config to get the proper URI for delete Config dummyConfig = ConfigBuilder.create() .addPrimitive(ConfigurationKeys.FLOW_GROUP_KEY, flowGroup) .addPrimitive(ConfigurationKeys.FLOW_NAME_KEY, flowName) .build(); FlowSpec spec = FlowSpec.builder() .withConfig(dummyConfig) .withVersion(SPEC_VERSION) .withDescription(SPEC_DESCRIPTION) .build(); this.flowCatalog.remove(spec.getUri()); } }
/** Creates a {@link ConfigBuilder} with no origin description attached. */
public static ConfigBuilder create() {
  Optional<String> noOrigin = Optional.absent();
  return new ConfigBuilder(noOrigin);
}
/** Verifies that an empty requester list survives a serialize/deserialize round trip. */
public void testSerDerWithEmptyRequester() throws IOException {
  List<ServiceRequester> requesters = new ArrayList<>();
  RequesterService requesterService = new NoopRequesterService(ConfigBuilder.create().build());
  String serialized = requesterService.serialize(requesters);

  // Push the serialized form through a Properties -> Config -> Properties round trip.
  Properties properties = new Properties();
  properties.put(RequesterService.REQUESTER_LIST, serialized);
  Config initConfig = ConfigBuilder.create().build();
  Config config = initConfig.withFallback(ConfigFactory.parseString(properties.toString()).resolve());
  Properties roundTripped = ConfigUtils.configToProperties(config);
  String reSerialized = roundTripped.getProperty(RequesterService.REQUESTER_LIST);

  Assert.assertTrue(serialized.equals(reSerialized));
  List<ServiceRequester> deserialized = requesterService.deserialize(serialized);
  Assert.assertTrue(requesters.equals(deserialized));
}
/**
 * Starts an embedded Rest.li server backed by a test job-status retriever and creates a
 * client pointed at it.
 *
 * Fix: removed the local {@code ConfigBuilder configBuilder} that was created but never used.
 */
@BeforeClass
public void setUp() throws Exception {
  JobStatusRetriever jobStatusRetriever = new TestJobStatusRetriever();
  final FlowStatusGenerator flowStatusGenerator =
      FlowStatusGenerator.builder().jobStatusRetriever(jobStatusRetriever).build();

  // Bind the generator under the name the FlowStatusResource expects to have injected.
  Injector injector = Guice.createInjector(new Module() {
    @Override
    public void configure(Binder binder) {
      binder.bind(FlowStatusGenerator.class)
          .annotatedWith(Names.named(FlowStatusResource.FLOW_STATUS_GENERATOR_INJECT_NAME))
          .toInstance(flowStatusGenerator);
    }
  });

  _server = EmbeddedRestliServer.builder()
      .resources(Lists.<Class<? extends BaseResource>>newArrayList(FlowStatusResource.class))
      .injector(injector)
      .build();
  _server.startAsync();
  _server.awaitRunning();

  _client = new FlowStatusClient(String.format("http://localhost:%s/", _server.getPort()));
  messageJoiner = Joiner.on(FlowStatusResource.MESSAGE_SEPARATOR);
}
/**
 * Verifies that a requester list carrying custom properties — including a value shaped like a
 * HOCON substitution — survives a serialize/deserialize round trip.
 */
public void testSerDerWithConfig() throws IOException {
  ServiceRequester first = new ServiceRequester("kafkaetl", "user", "dv");
  ServiceRequester second = new ServiceRequester("gobblin", "group", "dv");
  ServiceRequester third = new ServiceRequester("crm-backend", "service", "cert");
  // A value resembling a HOCON substitution must not get expanded during the round trip.
  first.getProperties().put("customKey", "${123}");

  List<ServiceRequester> requesters = new ArrayList<>();
  requesters.add(first);
  requesters.add(second);
  requesters.add(third);

  RequesterService requesterService = new NoopRequesterService(ConfigBuilder.create().build());
  String serialized = requesterService.serialize(requesters);

  // Push the serialized form through a Properties -> Config -> Properties round trip.
  Properties properties = new Properties();
  properties.put(RequesterService.REQUESTER_LIST, serialized);
  Config initConfig = ConfigBuilder.create().build();
  Config config = initConfig.withFallback(ConfigFactory.parseString(properties.toString()).resolve());
  Properties roundTripped = ConfigUtils.configToProperties(config);
  String reSerialized = roundTripped.getProperty(RequesterService.REQUESTER_LIST);

  Assert.assertTrue(serialized.equals(reSerialized));
  List<ServiceRequester> deserialized = requesterService.deserialize(serialized);
  Assert.assertTrue(requesters.equals(deserialized));
}
}
/**
 * Populates this builder from a Gobblin {@link State}, which must be a {@link WorkUnitState}.
 *
 * @param state the work-unit state supplying properties, metrics context, and task broker
 * @return this builder, further configured via {@code fromConfig}
 * @throws IllegalStateException if {@code state} is not a {@link WorkUnitState}
 */
AsyncHttpWriterBuilder<D, RQ, RP> fromState(State state) {
  if (!(state instanceof WorkUnitState)) {
    throw new IllegalStateException(
        String.format("AsyncHttpWriterBuilder requires a %s on construction.",
            WorkUnitState.class.getSimpleName()));
  }
  this.state = (WorkUnitState) state;
  this.metricContext = Instrumented.getMetricContext(this.state, AsyncHttpWriter.class);
  this.broker = this.state.getTaskBroker();

  // Scope the job properties to this writer's prefix, then layer defaults underneath.
  Config config = ConfigBuilder.create()
      .loadProps(state.getProperties(), CONF_PREFIX)
      .build()
      .withFallback(FALLBACK);
  this.maxOutstandingWrites = config.getInt(MAX_OUTSTANDING_WRITES);
  this.maxAttempts = config.getInt(MAX_ATTEMPTS);
  return fromConfig(config);
}
// NOTE(review): fragment of a larger method — the opening `if` matching the `} else {` below and
// the enclosing method boundaries are outside this view; only comments added here.
// Seed the flow config with its group and name taken from the FlowConfig id.
ConfigBuilder configBuilder = ConfigBuilder.create()
    .addPrimitive(ConfigurationKeys.FLOW_GROUP_KEY, flowConfig.getId().getFlowGroup())
    .addPrimitive(ConfigurationKeys.FLOW_NAME_KEY, flowConfig.getId().getFlowName());
// Scheduled flow: record the cron expression and whether to also fire immediately.
configBuilder.addPrimitive(ConfigurationKeys.JOB_SCHEDULE_KEY, schedule.getCronSchedule());
configBuilder.addPrimitive(ConfigurationKeys.FLOW_RUN_IMMEDIATELY, schedule.isRunImmediately());
} else {
// Unscheduled flow: pin a flow execution id derived from the current wall-clock time.
configBuilder.addPrimitive(ConfigurationKeys.FLOW_EXECUTION_ID_KEY, String.valueOf(System.currentTimeMillis()));
Config config = configBuilder.build();
/**
 * Creates a {@link ConfigBuilder} that records the supplied origin description.
 *
 * @param originDescription human-readable description of where the config originates
 */
public static ConfigBuilder create(String originDescription) {
  Optional<String> origin = Optional.of(originDescription);
  return new ConfigBuilder(origin);
}
/**
 * Initializes the converter: loads prefixed properties into a config (with defaults as
 * fallback) and builds the HTTP client, response handler, and request builder from it.
 *
 * @param workUnitState state supplying the converter properties and the task broker
 * @return this converter, fully initialized
 */
public HttpJoinConverter init(WorkUnitState workUnitState) {
  super.init(workUnitState);

  // Scope properties under this converter's prefix and layer defaults beneath them.
  Config config = ConfigBuilder.create()
      .loadProps(workUnitState.getProperties(), CONF_PREFIX)
      .build()
      .withFallback(DEFAULT_FALLBACK);

  httpClient = createHttpClient(config, workUnitState.getTaskBroker());
  responseHandler = createResponseHandler(config);
  requestBuilder = createRequestBuilder(config);
  return this;
}
/**
 * Creates a two-node {@link Dag} of {@link JobExecutionPlan}s where the second job depends on
 * the first; both jobs are marked RUNNING.
 *
 * @param id suffix used to derive the flow group/name and job template URIs
 * @param flowExecutionId execution id stamped into every job config
 * @return a Dag with one parent and one child
 * @throws URISyntaxException if a job template URI cannot be built
 */
public Dag<JobExecutionPlan> buildDag(String id, Long flowExecutionId) throws URISyntaxException {
  List<JobExecutionPlan> plans = new ArrayList<>();
  for (int idx = 0; idx < 2; idx++) {
    String suffix = Integer.toString(idx);
    Config jobConfig = ConfigBuilder.create()
        .addPrimitive(ConfigurationKeys.FLOW_GROUP_KEY, "group" + id)
        .addPrimitive(ConfigurationKeys.FLOW_NAME_KEY, "flow" + id)
        .addPrimitive(ConfigurationKeys.FLOW_EXECUTION_ID_KEY, flowExecutionId)
        .addPrimitive(ConfigurationKeys.JOB_NAME_KEY, "job" + suffix)
        .build();
    // Every job after the first depends on its predecessor, forming a simple chain.
    if (idx > 0) {
      jobConfig = jobConfig.withValue(ConfigurationKeys.JOB_DEPENDENCIES,
          ConfigValueFactory.fromAnyRef("job" + (idx - 1)));
    }
    JobSpec jobSpec = JobSpec.builder("test_job" + suffix)
        .withVersion(suffix)
        .withConfig(jobConfig)
        .withTemplate(new URI("job" + suffix))
        .build();
    SpecExecutor executor = new InMemorySpecExecutor(ConfigFactory.empty());
    JobExecutionPlan plan = new JobExecutionPlan(jobSpec, executor);
    plan.setExecutionStatus(ExecutionStatus.RUNNING);
    plans.add(plan);
  }
  return new JobExecutionPlanDagFactory().createDag(plans);
}
/** Creates a {@link ConfigBuilder} without an origin description. */
public static ConfigBuilder create() {
  return new ConfigBuilder(Optional.<String>absent());
}
/**
 * Initializes the async converter: loads prefixed properties into a config (with defaults as
 * fallback), reads the skip-failed-record flag, and builds the HTTP client, response handler,
 * and request builder.
 *
 * @param workUnitState state supplying the converter properties and the task broker
 * @return this converter, fully initialized
 */
public AsyncHttpJoinConverter init(WorkUnitState workUnitState) {
  super.init(workUnitState);

  // Scope properties under this converter's prefix and layer defaults beneath them.
  Config config = ConfigBuilder.create()
      .loadProps(workUnitState.getProperties(), CONF_PREFIX)
      .build()
      .withFallback(DEFAULT_FALLBACK);

  skipFailedRecord =
      workUnitState.getPropAsBoolean(ConfigurationKeys.CONVERTER_SKIP_FAILED_RECORD, false);
  httpClient = createHttpClient(config, workUnitState.getTaskBroker());
  responseHandler = createResponseHandler(config);
  requestBuilder = createRequestBuilder(config);
  return this;
}
// NOTE(review): fragment of a larger method — the loop body's closing braces and the enclosing
// method are outside this view; only comments added here.
// Build one job config per node; all nodes share the flow identity and failure option.
for (int i = 0; i < numNodes; i++) { String suffix = Integer.toString(i); Config jobConfig = ConfigBuilder.create(). addPrimitive(ConfigurationKeys.FLOW_GROUP_KEY, "group" + id). addPrimitive(ConfigurationKeys.FLOW_NAME_KEY, "flow" + id). addPrimitive(ConfigurationKeys.FLOW_EXECUTION_ID_KEY, flowExecutionId). addPrimitive(ConfigurationKeys.JOB_GROUP_KEY, "group" + id). addPrimitive(ConfigurationKeys.JOB_NAME_KEY, "job" + suffix). addPrimitive(ConfigurationKeys.FLOW_FAILURE_OPTION, flowFailureOption).build();
// Nodes 1 and 2 both depend on job0, producing a fan-out from the root.
if ((i == 1) || (i == 2)) { jobConfig = jobConfig.withValue(ConfigurationKeys.JOB_DEPENDENCIES, ConfigValueFactory.fromAnyRef("job0"));
/**
 * Creates a {@link ConfigBuilder} carrying the given origin description.
 *
 * @param originDescription human-readable description of the config's origin
 */
public static ConfigBuilder create(String originDescription) {
  return new ConfigBuilder(Optional.of(originDescription));
}
@BeforeClass public void setUp() throws Exception { this.testMetastoreDatabase = TestMetastoreDatabaseFactory.get(); String jdbcUrl = this.testMetastoreDatabase.getJdbcUrl(); ConfigBuilder configBuilder = ConfigBuilder.create(); BasicDataSource mySqlDs = new BasicDataSource(); mySqlDs.setDriverClassName(ConfigurationKeys.DEFAULT_STATE_STORE_DB_JDBC_DRIVER); mySqlDs.setDefaultAutoCommit(false); mySqlDs.setUrl(jdbcUrl); mySqlDs.setUsername(TEST_USER); mySqlDs.setPassword(TEST_PASSWORD); this.dbJobStateStore = new MysqlStateStore<>(mySqlDs, TEST_STATE_STORE, false, JobState.class); configBuilder.addPrimitive("selection.timeBased.lookbackTime", "10m"); configBuilder.addPrimitive(ConfigurationKeys.STATE_STORE_TYPE_KEY, "mysql"); configBuilder.addPrimitive(ConfigurationKeys.STATE_STORE_DB_TABLE_KEY, TEST_STATE_STORE); configBuilder.addPrimitive(ConfigurationKeys.STATE_STORE_DB_URL_KEY, jdbcUrl); configBuilder.addPrimitive(ConfigurationKeys.STATE_STORE_DB_USER_KEY, TEST_USER); configBuilder.addPrimitive(ConfigurationKeys.STATE_STORE_DB_PASSWORD_KEY, TEST_PASSWORD); ClassAliasResolver<DatasetStateStore.Factory> resolver = new ClassAliasResolver<>(DatasetStateStore.Factory.class); DatasetStateStore.Factory stateStoreFactory = resolver.resolveClass("mysql").newInstance(); this.config = configBuilder.build(); this.dbDatasetStateStore = stateStoreFactory.createStateStore(configBuilder.build()); // clear data that may have been left behind by a prior test run this.dbJobStateStore.delete(TEST_JOB_NAME1); this.dbDatasetStateStore.delete(TEST_JOB_NAME1); this.dbJobStateStore.delete(TEST_JOB_NAME2); this.dbDatasetStateStore.delete(TEST_JOB_NAME2); }
@BeforeClass public void setUp() throws Exception { ConfigBuilder configBuilder = ConfigBuilder.create(); testingServer = new TestingServer(-1); zkJobStateStore = new ZkStateStore<>(testingServer.getConnectString(), "/STATE_STORE/TEST", false, JobState.class); configBuilder.addPrimitive(ZkStateStoreConfigurationKeys.STATE_STORE_ZK_CONNECT_STRING_KEY, testingServer.getConnectString()); configBuilder.addPrimitive(ConfigurationKeys.STATE_STORE_ROOT_DIR_KEY, "/STATE_STORE/TEST2"); ClassAliasResolver<DatasetStateStore.Factory> resolver = new ClassAliasResolver<>(DatasetStateStore.Factory.class); DatasetStateStore.Factory stateStoreFactory = resolver.resolveClass("zk").newInstance(); zkDatasetStateStore = stateStoreFactory.createStateStore(configBuilder.build()); // clear data that may have been left behind by a prior test run zkJobStateStore.delete(TEST_JOB_NAME); zkDatasetStateStore.delete(TEST_JOB_NAME); zkJobStateStore.delete(TEST_JOB_NAME2); zkDatasetStateStore.delete(TEST_JOB_NAME2); }