/**
 * Extracts the job name and job id from the provided config.
 *
 * @param config the job configuration to read from
 * @return an immutable pair of (job name, job id)
 * @throws ConfigException if no job name is configured
 */
public static Pair<String, String> getJobNameAndId(Config config) {
  JobConfig jobConfig = new JobConfig(config);
  // Use the generic Option<String> rather than a raw Option plus an unchecked cast.
  Option<String> jobNameOption = jobConfig.getName();
  if (jobNameOption.isEmpty()) {
    throw new ConfigException("Missing job name");
  }
  return new ImmutablePair<>(jobNameOption.get(), jobConfig.getJobId());
}
/** Returns the configured job name, or {@code null} when it has not been set. */
public String getJobName() {
  final String configuredName = get(JobConfig.JOB_NAME(), null);
  return configuredName;
}
/** Returns the configured job id, or {@code null} when it has not been set. */
public String getJobId() {
  final String configuredId = get(JobConfig.JOB_ID(), null);
  return configuredId;
}
/**
 * Creates a grouper that distributes tasks across a fixed number of containers.
 *
 * @param config job configuration supplying the container count
 * @throws IllegalArgumentException if the configured container count is not positive
 */
public GroupByContainerCount(Config config) {
  int count = new JobConfig(config).getContainerCount();
  // Fail fast on an invalid count before retaining any state; always brace conditionals.
  if (count <= 0) {
    throw new IllegalArgumentException("Must have at least one container");
  }
  this.containerCount = count;
  this.config = config;
}
/** Asserts that the generated job config matches the expected config field by field. */
private void validateJobConfig(Config expectedConfig, JobConfig jobConfig) {
  String expectedJobName = expectedConfig.get(JobConfig.JOB_NAME());
  String expectedJobId = expectedConfig.get(JobConfig.JOB_ID());
  assertEquals(expectedJobName, jobConfig.getName().get());
  assertEquals(expectedJobId, jobConfig.getJobId());
  // The serialized execution plan is injected under an internal config key.
  assertEquals("testJobGraphJson", jobConfig.get(JobNodeConfigurationGenerator.CONFIG_INTERNAL_EXECUTION_PLAN));
  String expectedInputs = expectedConfig.get(TaskConfig.INPUT_STREAMS());
  assertEquals(expectedInputs, jobConfig.get(TaskConfig.INPUT_STREAMS()));
  String expectedBroadcasts = expectedConfig.get(TaskConfigJava.BROADCAST_INPUT_STREAMS);
  assertEquals(expectedBroadcasts, jobConfig.get(TaskConfigJava.BROADCAST_INPUT_STREAMS));
}
/**
 * Verifies that the runner reports the expected {@link ApplicationStatus} for a job id
 * that the mock factory recognizes as new vs. running.
 */
@Test
public void testGetStatus() {
  // Use a parameterized map instead of a raw type to avoid unchecked warnings.
  Map<String, String> configMap = new HashMap<>();
  configMap.put(JobConfig.JOB_NAME(), "jobName");
  configMap.put(JobConfig.STREAM_JOB_FACTORY_CLASS(), MockStreamJobFactory.class.getName());
  configMap.put(JobConfig.JOB_ID(), "newJob");
  StreamApplication userApp = appDesc -> { };
  runner = spy(new RemoteApplicationRunner(userApp, new MapConfig(configMap)));
  Assert.assertEquals(ApplicationStatus.New, runner.getApplicationStatus(new JobConfig(new MapConfig(configMap))));
  // Switching the job id should flip the reported status to Running.
  configMap.put(JobConfig.JOB_ID(), "runningJob");
  runner = spy(new RemoteApplicationRunner(userApp, new MapConfig(configMap)));
  Assert.assertEquals(ApplicationStatus.Running, runner.getApplicationStatus(new JobConfig(new MapConfig(configMap))));
}
/**
 * Builds the test fixture: serde, system/stream descriptors, the backing config map,
 * and the stream application descriptor used by the tests in this class.
 */
@Before public void setUp() {
  defaultSerde = KVSerde.of(new StringSerde(), new JsonSerdeV2<>());
  // Three distinct mock systems: input, output, and the intermediate (default) system.
  inputSystemDescriptor = new GenericSystemDescriptor("input-system", "mockSystemFactoryClassName");
  outputSystemDescriptor = new GenericSystemDescriptor("output-system", "mockSystemFactoryClassName");
  intermediateSystemDescriptor = new GenericSystemDescriptor("intermediate-system", "mockSystemFactoryClassName");
  input1Descriptor = inputSystemDescriptor.getInputDescriptor("input1", defaultSerde);
  input2Descriptor = inputSystemDescriptor.getInputDescriptor("input2", defaultSerde);
  outputDescriptor = outputSystemDescriptor.getOutputDescriptor("output", defaultSerde);
  // Intermediate streams follow the generated "<jobName>-<jobId>-partition_by-<opId>" naming.
  intermediateInputDescriptor = intermediateSystemDescriptor.getInputDescriptor("jobName-jobId-partition_by-p1", defaultSerde)
      .withPhysicalName("jobName-jobId-partition_by-p1");
  intermediateOutputDescriptor = intermediateSystemDescriptor.getOutputDescriptor("jobName-jobId-partition_by-p1", defaultSerde)
      .withPhysicalName("jobName-jobId-partition_by-p1");
  broadcastInputDesriptor = intermediateSystemDescriptor.getInputDescriptor("jobName-jobId-broadcast-b1", defaultSerde)
      .withPhysicalName("jobName-jobId-broadcast-b1");
  // Assemble the full job config from the descriptors above.
  Map<String, String> configs = new HashMap<>();
  configs.put(JobConfig.JOB_NAME(), "jobName");
  configs.put(JobConfig.JOB_ID(), "jobId");
  configs.putAll(input1Descriptor.toConfig());
  configs.putAll(input2Descriptor.toConfig());
  configs.putAll(outputDescriptor.toConfig());
  configs.putAll(inputSystemDescriptor.toConfig());
  configs.putAll(outputSystemDescriptor.toConfig());
  configs.putAll(intermediateSystemDescriptor.toConfig());
  // The intermediate system doubles as the job's default system.
  configs.put(JobConfig.JOB_DEFAULT_SYSTEM(), intermediateSystemDescriptor.getSystemName());
  mockConfig = spy(new MapConfig(configs));
  mockStreamAppDesc = new StreamApplicationDescriptorImpl(getRepartitionJoinStreamApplication(), mockConfig);
}
/**
 * Builds the static Samza SQL job configuration for the given execution.
 * NOTE(review): method continues beyond this chunk — only the initial config
 * population is visible here.
 */
static Map<String, String> fetchSamzaSqlConfig(int execId, ExecutionContext executionContext) {
  HashMap<String, String> staticConfigs = new HashMap<>();
  // Job name/processor id are derived from the execution id so runs don't collide.
  staticConfigs.put(JobConfig.JOB_NAME(), "sql-job-" + execId);
  staticConfigs.put(JobConfig.PROCESSOR_ID(), String.valueOf(execId));
  // Standalone-style coordination: passthrough coordinator + single-container grouping.
  staticConfigs.put(JobCoordinatorConfig.JOB_COORDINATOR_FACTORY, PassthroughJobCoordinatorFactory.class.getName());
  staticConfigs.put(TaskConfig.GROUPER_FACTORY(), SingleContainerGrouperFactory.class.getName());
/**
 * Instantiates the {@link SystemStreamPartitionGrouper} configured for this job
 * via its factory class name.
 */
private SystemStreamPartitionGrouper getSystemStreamPartitionGrouper() {
  JobConfig jobConfig = new JobConfig(config);
  String grouperFactoryClassName = jobConfig.getSystemStreamPartitionGrouperFactory();
  SystemStreamPartitionGrouperFactory grouperFactory =
      Util.getObj(grouperFactoryClassName, SystemStreamPartitionGrouperFactory.class);
  return grouperFactory.getSystemStreamPartitionGrouper(jobConfig);
}
/**
 * Collects the system/stream configs from every input, output, and system descriptor
 * registered on the application descriptor, plus the default system when declared.
 */
private Map<String, String> generateSystemStreamConfigs(ApplicationDescriptorImpl<? extends ApplicationDescriptor> appDesc) {
  Map<String, String> generatedConfigs = new HashMap<>();
  // Only the descriptor values matter here, so iterate over values() directly.
  appDesc.getInputDescriptors().values().forEach(isd -> generatedConfigs.putAll(isd.toConfig()));
  appDesc.getOutputDescriptors().values().forEach(osd -> generatedConfigs.putAll(osd.toConfig()));
  appDesc.getSystemDescriptors().forEach(sd -> generatedConfigs.putAll(sd.toConfig()));
  appDesc.getDefaultSystemDescriptor()
      .ifPresent(dsd -> generatedConfigs.put(JobConfig.JOB_DEFAULT_SYSTEM(), dsd.getSystemName()));
  return generatedConfigs;
}
}
/**
 * Looks up the current status of the job described by {@code jobConfig}.
 *
 * @param jobConfig config identifying the job to query
 * @return the status reported by the {@link JobRunner}
 */
/* package private */ ApplicationStatus getApplicationStatus(JobConfig jobConfig) {
  JobRunner runner = new JobRunner(jobConfig);
  ApplicationStatus status = runner.status();
  // SLF4J has a two-argument overload; no need to allocate an Object[] explicitly.
  LOG.debug("Status is {} for job {}", status, jobConfig.getName());
  return status;
}
}
/**
 * Creates an instance of {@link SystemStreamPartitionMapper} using the stream partition
 * expansion factory class defined in the given {@code config}.
 *
 * @param config the configuration of the samza job
 * @return the instantiated {@link SystemStreamPartitionMapper} object
 */
private SystemStreamPartitionMapper getSystemStreamPartitionMapper(Config config) {
  JobConfig jobConfig = new JobConfig(config);
  String systemStreamPartitionMapperClass = jobConfig.getSystemStreamPartitionMapperFactoryName();
  // The factory class is resolved reflectively from its configured name.
  SystemStreamPartitionMapperFactory systemStreamPartitionMapperFactory =
      Util.getObj(systemStreamPartitionMapperClass, SystemStreamPartitionMapperFactory.class);
  return systemStreamPartitionMapperFactory.getStreamPartitionMapper(config, new MetricsRegistryMap());
}
/**
 * Creates a passthrough coordinator for the given processor, resolving this
 * processor's location id via the configured {@link LocationIdProviderFactory}.
 */
public PassthroughJobCoordinator(String processorId, Config config, MetricsRegistry metricsRegistry) {
  this.processorId = processorId;
  this.config = config;
  String providerFactoryClassName = new JobConfig(config).getLocationIdProviderFactory();
  LocationIdProviderFactory providerFactory =
      Util.getObj(providerFactoryClassName, LocationIdProviderFactory.class);
  this.locationId = providerFactory.getLocationIdProvider(config).getLocationId();
}
/**
 * Wires up the container process manager with its configs, metrics, resource
 * manager, and the allocator thread (created here but not yet started).
 */
ContainerProcessManager(Config config, SamzaApplicationState state, MetricsRegistryMap registry,
    AbstractContainerAllocator allocator, ClusterResourceManager manager) {
  this.state = state;
  this.clusterResourceManager = manager;
  // Derive the typed config views once up front.
  this.clusterManagerConfig = new ClusterManagerConfig(config);
  this.jobConfig = new JobConfig(config);
  this.hostAffinityEnabled = clusterManagerConfig.getHostAffinityEnabled();
  this.metrics = new ContainerProcessManagerMetrics(config, state, registry);
  // The allocator runs on its own dedicated thread.
  this.containerAllocator = allocator;
  this.allocatorThread = new Thread(this.containerAllocator, "Container Allocator Thread");
}
// Minimal job identity plus a default system for the test app.
configs.put(JobConfig.JOB_NAME(), "test-app");
configs.put(JobConfig.JOB_DEFAULT_SYSTEM(), "test-system");
// Register both streams on the same system; stream id doubles as the physical name.
StreamTestUtils.addStreamConfigs(configs, streamId0, system, streamId0);
StreamTestUtils.addStreamConfigs(configs, streamId1, system, streamId1);
/** Builds a grouper that places all tasks in the single container for this processor. */
@Override
public TaskNameGrouper build(Config config) {
  String processorId = config.get(JobConfig.PROCESSOR_ID());
  return new SingleContainerGrouper(processorId);
}
}
/** Maps the mock job id onto a fixed status; any unrecognized id counts as a failed run. */
@Override
public ApplicationStatus getStatus() {
  String jobId = c.getJobId();
  if (jobId.equals("newJob")) {
    return ApplicationStatus.New;
  } else if (jobId.equals("runningJob")) {
    return ApplicationStatus.Running;
  } else {
    return ApplicationStatus.UnsuccessfulFinish;
  }
}
};