/**
 * Handles deletion of a job: forwards to the superclass, then enqueues a DELETE
 * verb paired with a minimal JobSpec (URI + version, empty config) for downstream
 * processing, bumping the enqueue-count metric on success.
 */
@Override
public void onDeleteJob(URI deletedJobURI, String deletedJobVersion) {
  super.onDeleteJob(deletedJobURI, deletedJobVersion);
  try {
    // A DELETE only needs identity (URI + version); config is intentionally empty.
    JobSpec deleteSpec = JobSpec.builder(deletedJobURI)
        .withVersion(deletedJobVersion)
        .withConfigAsProperties(new Properties())
        .build();
    _jobSpecQueue.put(new ImmutablePair<SpecExecutor.Verb, Spec>(SpecExecutor.Verb.DELETE, deleteSpec));
    _metrics.jobSpecEnqCount.incrementAndGet();
  } catch (InterruptedException e) {
    // Restore the interrupt status so callers can observe the interruption.
    Thread.currentThread().interrupt();
  }
}
/**
 * Converts a tracking event into a single JobSpec, or rejects it.
 * Rejected events increment the rejected-events meter and yield an empty list.
 */
@Override
public Collection<Either<JobSpec, URI>> parseJobSpec(GobblinTrackingEvent event) {
  if (!acceptEvent(event)) {
    this.rejectedEvents.inc();
    return Lists.newArrayList();
  }

  Map<String, String> metadata = event.getMetadata();
  String datasetURN = metadata.get(SlaEventKeys.DATASET_URN_KEY);
  // Job URI = configured base URI with the dataset URN appended as a path.
  URI jobSpecURI = PathUtils.mergePaths(new Path(this.baseURI), new Path(datasetURN)).toUri();

  // Copy whitelisted metadata entries into the job config, renaming each key
  // according to the extractKeys mapping (event key -> config key).
  Map<String, String> jobConfigMap = Maps.newHashMap();
  for (Map.Entry<String, String> mapping : this.extractKeys.entrySet()) {
    String eventKey = mapping.getKey();
    if (metadata.containsKey(eventKey)) {
      jobConfigMap.put(mapping.getValue(), metadata.get(eventKey));
    }
  }

  JobSpec jobSpec = JobSpec.builder(jobSpecURI)
      .withTemplate(this.template)
      .withConfig(ConfigFactory.parseMap(jobConfigMap))
      .build();
  return Lists.newArrayList(Either.<JobSpec, URI>left(jobSpec));
}
/**
 * Deletes the Azkaban project corresponding to the given spec URI.
 *
 * NOTE(review): after the Azkaban delete succeeds this method still throws
 * {@link UnsupportedOperationException} unconditionally, so it never returns a
 * {@link Future}. Confirm whether this is a deliberate "no async handle
 * available" marker or a leftover stub that should return a completed Future.
 *
 * @param deletedSpecURI URI identifying the spec whose Azkaban project is deleted
 * @param headers unused in this implementation
 * @throws RuntimeException (wrapping IOException) if the Azkaban delete call fails
 */
@Override
public Future<?> deleteSpec(URI deletedSpecURI, Properties headers) {
  // Delete project
  JobSpec jobSpec = new JobSpec.Builder(deletedSpecURI).build();
  try {
    AzkabanJobHelper.deleteAzkabanJob(_sessionId, new AzkabanProjectConfig(jobSpec));
  } catch (IOException e) {
    throw new RuntimeException("Issue in deleting Azkaban project.", e);
  }
  throw new UnsupportedOperationException();
}
// Derive the job-spec URI from the template's file name with its extension stripped,
// then build a v1 JobSpec from the raw template config and add its execution plan.
// (Fragment: the enclosing method and `jobExecutionPlans`/`specExecutor` are outside this view.)
String jobSpecUri = Files.getNameWithoutExtension(new Path(jobTemplate.getUri()).getName());
jobExecutionPlans.add(new JobExecutionPlan(JobSpec.builder(jobSpecUri).withConfig(jobTemplate.getRawTemplateConfig()).
    withVersion("1").withTemplate(jobTemplate.getUri()).build(), specExecutor));
// Round-trip an empty JobSpec through serialization.
JobSpec.Builder b = new JobSpec.Builder("test:job");
JobSpec js1 = b.build();
byte[] serializedBytes = SerializationUtils.serialize(js1);
JobSpec js1Deserialized = SerializationUtils.deserialize(serializedBytes);

// FIX: the original had a dangling ".withConfigAsProperties(props);" with no
// receiver and no `props` in scope (a syntax error). Re-attach it to the builder
// with an explicitly constructed Properties object, then round-trip again.
Properties props = new Properties();
props.setProperty("testKey", "testValue");
b.withConfigAsProperties(props);
JobSpec js2 = b.build();
serializedBytes = SerializationUtils.serialize(js2);
JobSpec js2Deserialized = SerializationUtils.deserialize(serializedBytes);
// Tail of a JobSpec builder chain whose start is outside this view:
// sets job name "myJob" in an otherwise-empty config, builds the spec,
// then derives a JobExecution from the (out-of-view) spec `js1`.
.withConfig(ConfigFactory.empty()
    .withValue(ConfigurationKeys.JOB_NAME_KEY, ConfigValueFactory.fromAnyRef("myJob")))
    .build();
JobExecution je1 = JobExecutionUpdatable.createFromJobSpec(js1);
/**
 * Verifies listener callback semantics of InMemoryJobCatalog: addListener replays
 * pre-existing specs as onAddJob, subsequent puts/removes fire update/delete
 * callbacks, unknown removals are silent, and no callbacks fire after removeListener.
 */
@Test
public void testCallbacks() throws Exception {
  InMemoryJobCatalog catalog = new InMemoryJobCatalog();
  catalog.startAsync();
  catalog.awaitRunning(1, TimeUnit.SECONDS);

  JobCatalogListener listener = Mockito.mock(JobCatalogListener.class);

  JobSpec job1v1 = JobSpec.builder("test:job1").withVersion("1").build();
  JobSpec job1v2 = JobSpec.builder("test:job1").withVersion("2").build();
  JobSpec job1v3 = JobSpec.builder("test:job1").withVersion("3").build();
  JobSpec job2 = JobSpec.builder("test:job2").withVersion("1").build();

  // job1v1 is put BEFORE the listener is attached; the verify below shows
  // addListener replays it as an onAddJob callback.
  catalog.put(job1v1);
  catalog.addListener(listener);
  catalog.put(job1v2);
  catalog.put(job2);
  catalog.put(job1v3);
  catalog.remove(job2.getUri());
  // Removing a URI that was never added must not trigger any callback.
  catalog.remove(new URI("test:dummy_job"));
  catalog.removeListener(listener);
  // After removeListener, this removal must not be reported.
  catalog.remove(job1v3.getUri());

  Mockito.verify(listener).onAddJob(Mockito.eq(job1v1));
  Mockito.verify(listener).onUpdateJob(Mockito.eq(job1v2));
  Mockito.verify(listener).onAddJob(Mockito.eq(job2));
  Mockito.verify(listener).onUpdateJob(Mockito.eq(job1v3));
  Mockito.verify(listener).onDeleteJob(Mockito.eq(job2.getUri()), Mockito.eq(job2.getVersion()));
  Mockito.verifyNoMoreInteractions(listener);

  catalog.stopAsync();
  catalog.awaitTerminated(1, TimeUnit.SECONDS);
}
// Fragment: `factory1` is created outside this view.
// The lock-file directory must exist once the factory is set up.
Assert.assertTrue(factory1.getFs().exists(factory1.getLockFileDir()));
JobSpec js1 = JobSpec.builder("gobblin-test:job1").build();
FileBasedJobLock lock11 = factory1.getJobLock(js1);
// The lock file's name is derived from (prefixed by) the job's name.
Assert.assertTrue(lock11.getLockFile().getName().startsWith(FileBasedJobLockFactory.getJobName(js1)));
// Tail of a JobSpec builder chain started outside this view (the leading "."
// is on the preceding, unseen line): attaches a template URI and builds `js`,
// then pairs it with a dummy in-memory executor into an execution plan.
withTemplate(new URI("job" + suffix)).build();
SpecExecutor specExecutor = InMemorySpecExecutor.createDummySpecExecutor(new URI("job" + i));
JobExecutionPlan jobExecutionPlan = new JobExecutionPlan(js, specExecutor);
// Tail of a JobSpec builder chain whose start is outside this view:
// sets job name "myJob" in an otherwise-empty config, builds the spec,
// then derives a JobExecution from the (out-of-view) spec `js1`.
.withConfig(ConfigFactory.empty()
    .withValue(ConfigurationKeys.JOB_NAME_KEY, ConfigValueFactory.fromAnyRef("myJob")))
    .build();
JobExecution je1 = JobExecutionUpdatable.createFromJobSpec(js1);
/**
 * Builds a linear {@link Dag <JobExecutionPlan>}: node i is named "job&lt;i&gt;" and
 * (except the first) depends on its immediate predecessor.
 *
 * @param numNodes number of jobs in the dag
 * @param startNodeId id assigned to the first job
 * @param isForkable when true, the last job carries {@code JOB_FORK_ON_CONCAT=true}
 * @return a Dag.
 */
public Dag<JobExecutionPlan> buildDag(int numNodes, int startNodeId, boolean isForkable)
    throws URISyntaxException {
  Config baseConfig = ConfigBuilder.create()
      .addPrimitive(ConfigurationKeys.FLOW_GROUP_KEY, "group0")
      .addPrimitive(ConfigurationKeys.FLOW_NAME_KEY, "flow0")
      .addPrimitive(ConfigurationKeys.FLOW_EXECUTION_ID_KEY, System.currentTimeMillis())
      .addPrimitive(ConfigurationKeys.JOB_GROUP_KEY, "group0")
      .build();

  List<JobExecutionPlan> plans = new ArrayList<>();
  int lastNodeId = startNodeId + numNodes - 1;
  for (int nodeId = startNodeId; nodeId <= lastNodeId; nodeId++) {
    String suffix = Integer.toString(nodeId);
    Config jobConfig =
        baseConfig.withValue(ConfigurationKeys.JOB_NAME_KEY, ConfigValueFactory.fromAnyRef("job" + suffix));
    // Only the final node may be marked forkable.
    if (isForkable && nodeId == lastNodeId) {
      jobConfig = jobConfig.withValue(ConfigurationKeys.JOB_FORK_ON_CONCAT, ConfigValueFactory.fromAnyRef(true));
    }
    // Every node after the first depends on its predecessor, forming a chain.
    if (nodeId > startNodeId) {
      jobConfig = jobConfig.withValue(ConfigurationKeys.JOB_DEPENDENCIES,
          ConfigValueFactory.fromAnyRef("job" + (nodeId - 1)));
    }
    JobSpec jobSpec = JobSpec.builder("test_job" + suffix)
        .withVersion(suffix)
        .withConfig(jobConfig)
        .withTemplate(new URI("job" + suffix))
        .build();
    SpecExecutor executor = InMemorySpecExecutor.createDummySpecExecutor(new URI("job" + nodeId));
    plans.add(new JobExecutionPlan(jobSpec, executor));
  }
  return new JobExecutionPlanDagFactory().createDag(plans);
}
/**
 * Parses a UTF-8 instruction message into JobSpecs and removal URIs.
 * The message is a comma-separated list of instructions; each instruction is
 * colon-delimited: either "REMOVE:&lt;uri&gt;" (yields a right/URI) or
 * "&lt;uri&gt;:&lt;version&gt;" (yields a left/JobSpec with an empty config).
 *
 * @throws IOException if any embedded URI is malformed
 */
@Override
public Collection<Either<JobSpec, URI>> parseJobSpec(byte[] message) throws IOException {
  String messageString = new String(message, Charsets.UTF_8);
  List<Either<JobSpec, URI>> jobSpecs = Lists.newArrayList();
  try {
    for (String instruction : SPLITTER_COMMA.split(messageString)) {
      List<String> tokens = SPLITTER_COLON.splitToList(instruction);
      if (REMOVE.equals(tokens.get(0))) {
        // Removal instruction: second token is the URI to drop.
        jobSpecs.add(Either.<JobSpec, URI>right(new URI(tokens.get(1))));
      } else {
        // Add/update instruction: "<uri>:<version>".
        JobSpec jobSpec = new JobSpec.Builder(new URI(tokens.get(0)))
            .withConfig(ConfigFactory.empty())
            .withVersion(tokens.get(1))
            .build();
        jobSpecs.add(Either.<JobSpec, URI>left(jobSpec));
      }
    }
    return jobSpecs;
  } catch (URISyntaxException use) {
    // Surface malformed URIs as I/O failures per the method contract.
    throw new IOException(use);
  }
}
/**
 * Compiles the given flow spec into a dag of {@code NUMBER_OF_JOBS} dummy jobs.
 * Each job inherits the flow's name/group and gets a job name/group suffixed
 * with its index; all jobs share one flow execution id (current time millis).
 */
@Override
public Dag<JobExecutionPlan> compileFlow(Spec spec) {
  List<JobExecutionPlan> jobExecutionPlans = new ArrayList<>();
  long flowExecutionId = System.currentTimeMillis();
  Properties flowProperties = ((FlowSpec) spec).getConfigAsProperties();
  int i = 0;
  while (i++ < NUMBER_OF_JOBS) {
    String specUri = "/foo/bar/spec/" + i;
    Properties properties = new Properties();
    properties.put(ConfigurationKeys.FLOW_NAME_KEY, flowProperties.get(ConfigurationKeys.FLOW_NAME_KEY));
    properties.put(ConfigurationKeys.FLOW_GROUP_KEY, flowProperties.get(ConfigurationKeys.FLOW_GROUP_KEY));
    properties.put(ConfigurationKeys.JOB_NAME_KEY,
        flowProperties.get(ConfigurationKeys.FLOW_NAME_KEY) + "_" + i);
    properties.put(ConfigurationKeys.JOB_GROUP_KEY,
        flowProperties.get(ConfigurationKeys.FLOW_GROUP_KEY) + "_" + i);
    // FIX: store the execution id as a String. The original put a raw Long into
    // Properties; Properties is documented as String-to-String, and String-based
    // enumeration (stringPropertyNames/getProperty) skips non-String values, so
    // the id would likely be dropped during config conversion — TODO confirm
    // against ConfigUtils.propertiesToConfig.
    properties.put(ConfigurationKeys.FLOW_EXECUTION_ID_KEY, Long.toString(flowExecutionId));
    JobSpec jobSpec = JobSpec.builder(specUri)
        .withConfig(ConfigUtils.propertiesToConfig(properties))
        .withVersion("1")
        .withDescription("Spec Description")
        .build();
    jobExecutionPlans.add(new JobExecutionPlan(jobSpec, new InMemorySpecExecutor(ConfigFactory.empty())));
  }
  return new JobExecutionPlanDagFactory().createDag(jobExecutionPlans);
}
}
/**
 * Create a {@link Dag <JobExecutionPlan>} with one parent and one child:
 * two jobs ("job0", "job1") where job1 depends on job0, both marked RUNNING.
 *
 * @param id suffix used for the flow group/name
 * @param flowExecutionId execution id stamped into each job's config
 * @return a Dag.
 */
public Dag<JobExecutionPlan> buildDag(String id, Long flowExecutionId) throws URISyntaxException {
  List<JobExecutionPlan> plans = new ArrayList<>();
  for (int nodeIdx = 0; nodeIdx < 2; nodeIdx++) {
    String suffix = Integer.toString(nodeIdx);
    Config jobConfig = ConfigBuilder.create()
        .addPrimitive(ConfigurationKeys.FLOW_GROUP_KEY, "group" + id)
        .addPrimitive(ConfigurationKeys.FLOW_NAME_KEY, "flow" + id)
        .addPrimitive(ConfigurationKeys.FLOW_EXECUTION_ID_KEY, flowExecutionId)
        .addPrimitive(ConfigurationKeys.JOB_NAME_KEY, "job" + suffix)
        .build();
    // The child (second) job depends on the parent.
    if (nodeIdx > 0) {
      jobConfig = jobConfig.withValue(ConfigurationKeys.JOB_DEPENDENCIES,
          ConfigValueFactory.fromAnyRef("job" + (nodeIdx - 1)));
    }
    JobSpec jobSpec = JobSpec.builder("test_job" + suffix)
        .withVersion(suffix)
        .withConfig(jobConfig)
        .withTemplate(new URI("job" + suffix))
        .build();
    JobExecutionPlan plan = new JobExecutionPlan(jobSpec, new InMemorySpecExecutor(ConfigFactory.empty()));
    // Both nodes start out in RUNNING state.
    plan.setExecutionStatus(ExecutionStatus.RUNNING);
    plans.add(plan);
  }
  return new JobExecutionPlanDagFactory().createDag(plans);
}
/**
 * Creates {@link JobSpec} from the {@link AvroJobSpec} record.
 * @param record the record as an {@link AvroJobSpec}
 * @return a {@link JobSpec} wrapped in a {@link Collection} of {@link Either}
 */
@Override
public Collection<Either<JobSpec, URI>> parseJobSpec(AvroJobSpec record) {
  JobSpec.Builder jobSpecBuilder = JobSpec.builder(record.getUri());

  Properties props = new Properties();
  props.putAll(record.getProperties());
  jobSpecBuilder.withJobCatalogURI(record.getUri()).withVersion(record.getVersion())
      .withDescription(record.getDescription()).withConfigAsProperties(props)
      .withMetadata(record.getMetadata());

  // Template is optional; an unparseable template URI is logged and skipped (best effort).
  if (!record.getTemplateUri().isEmpty()) {
    try {
      jobSpecBuilder.withTemplate(new URI(record.getTemplateUri()));
    } catch (URISyntaxException e) {
      // FIX: pass the exception to the logger so the stack trace is preserved
      // (the original logged only the message text).
      log.error("could not parse template URI " + record.getTemplateUri(), e);
    }
  }

  JobSpec jobSpec = jobSpecBuilder.build();
  log.info("Parsed job spec " + jobSpec.toString());

  return Lists.newArrayList(Either.<JobSpec, URI>left(jobSpec));
}
/**
 * Verifies JobSpecFilter matching: a URI-only filter matches every version of
 * that URI, while a URI+version filter matches exactly one spec.
 */
@Test
public void testUriAndVersion() {
  JobSpec job1v1 = JobSpec.builder("gobblin:/test/job1").withVersion("1").build();
  JobSpec job1v2 = JobSpec.builder("gobblin:/test/job1").withVersion("2").build();
  JobSpec job2v1 = JobSpec.builder("gobblin:/test/job2").withVersion("1").build();
  JobSpec job2v2 = JobSpec.builder("gobblin:/test/job2").withVersion("2").build();

  // URI-only filter: version is ignored, so both versions of job1 match.
  JobSpecFilter uriOnlyFilter = JobSpecFilter.eqJobSpecURI("gobblin:/test/job1");
  Assert.assertTrue(uriOnlyFilter.apply(job1v1));
  Assert.assertTrue(uriOnlyFilter.apply(job1v2));
  Assert.assertFalse(uriOnlyFilter.apply(job2v1));
  Assert.assertFalse(uriOnlyFilter.apply(job2v2));

  // URI + version filter: only job2 at version "2" matches.
  JobSpecFilter uriAndVersionFilter =
      JobSpecFilter.builder().eqURI("gobblin:/test/job2").eqVersion("2").build();
  Assert.assertFalse(uriAndVersionFilter.apply(job1v1));
  Assert.assertFalse(uriAndVersionFilter.apply(job1v2));
  Assert.assertFalse(uriAndVersionFilter.apply(job2v1));
  Assert.assertTrue(uriAndVersionFilter.apply(job2v2));
}
/** Builds a minimal version-"1" JobSpec at {@code specUri} with an empty config. */
private JobSpec initJobSpec(String specUri) {
  return JobSpec.builder(specUri)
      .withConfig(ConfigUtils.propertiesToConfig(new Properties()))
      .withVersion("1")
      .withDescription("Spec Description")
      .build();
}
/**
 * {@inheritDoc}
 *
 * NOTE: For this callback only conditions on the URI and version will be used.
 */
@Override
public void onDeleteJob(URI deletedJobURI, String deletedJobVersion) {
  // Synthesize a spec carrying only URI + version so the filter can evaluate it;
  // the deleted spec itself is no longer available at this point.
  JobSpec probeSpec = JobSpec.builder(deletedJobURI).withVersion(deletedJobVersion).build();
  if (this.filter.apply(probeSpec)) {
    this.delegate.onDeleteJob(deletedJobURI, deletedJobVersion);
  }
}
/** Creates a bare JobSpec (version "1", empty config) for the given URI string. */
private JobSpec initJobSpec(String specUri) {
  Properties emptyProps = new Properties();
  Config emptyConfig = ConfigUtils.propertiesToConfig(emptyProps);
  JobSpec.Builder builder = JobSpec.builder(specUri)
      .withConfig(emptyConfig)
      .withVersion("1")
      .withDescription("Spec Description");
  return builder.build();
}
/**
 * Records the received event for later inspection and wraps its name in a
 * trivial JobSpec returned as a single left-valued Either.
 */
@Override
public Collection<Either<JobSpec, URI>> parseJobSpec(GobblinTrackingEvent message) {
  this.events.add(message);
  JobSpec jobSpec = JobSpec.builder(message.getName()).build();
  return Lists.newArrayList(Either.<JobSpec, URI>left(jobSpec));
}
}