/**
 * {@inheritDoc}
 *
 * <p>Pure pass-through: returns the Spark specifications of the wrapped
 * {@code delegate} unchanged (presumably a decorator/forwarding class —
 * the enclosing class is not visible in this chunk).
 */
@Override
public Map<String, SparkSpecification> getSpark() {
  return delegate.getSpark();
}
/**
 * {@inheritDoc}
 *
 * <p>Forwards directly to {@code delegate}; no caching or transformation
 * is applied here.
 */
@Override
public Map<String, SparkSpecification> getSpark() {
  return delegate.getSpark();
}
private static SparkSpecification getSparkSpecification(Program program) { SparkSpecification spec = program.getApplicationSpecification().getSpark().get(program.getName()); // Spec shouldn't be null, otherwise the spark program won't even get started Preconditions.checkState(spec != null, "SparkSpecification not found for %s", program.getId()); return spec; }
/**
 * Returns a lazy view over all program specifications of the given application:
 * MapReduce, services, Spark, workers and workflows, in that order.
 *
 * @param appId the application to look up via {@code store}
 * @return a concatenated (lazy) iterable of every program specification
 */
private Iterable<ProgramSpecification> getProgramSpecs(ApplicationId appId) {
  ApplicationSpecification spec = store.getApplication(appId);
  return Iterables.concat(
    spec.getMapReduce().values(),
    spec.getServices().values(),
    spec.getSpark().values(),
    spec.getWorkers().values(),
    spec.getWorkflows().values());
}
private static SparkSpecification getSparkSpecification(Program program) { SparkSpecification spec = program.getApplicationSpecification().getSpark().get(program.getName()); // Spec shouldn't be null, otherwise the spark program won't even get started Preconditions.checkState(spec != null, "SparkSpecification not found for %s", program.getId()); return spec; }
private static SparkSpecification getSparkSpecification(Program program) { SparkSpecification spec = program.getApplicationSpecification().getSpark().get(program.getName()); // Spec shouldn't be null, otherwise the spark program won't even get started Preconditions.checkState(spec != null, "SparkSpecification not found for %s", program.getId()); return spec; }
/**
 * Returns a lazy, concatenated view of every program specification in the
 * application: flows, MapReduce, services, Spark, workers and workflows.
 *
 * @param appId the application id, resolved through {@code store}
 * @return an iterable over all program specifications of the app
 */
private Iterable<ProgramSpecification> getProgramSpecs(ApplicationId appId) {
  ApplicationSpecification spec = store.getApplication(appId);
  return Iterables.concat(
    spec.getFlows().values(),
    spec.getMapReduce().values(),
    spec.getServices().values(),
    spec.getSpark().values(),
    spec.getWorkers().values(),
    spec.getWorkflows().values());
}
/**
 * Collects the ids of every program declared by the application:
 * MapReduce, workflows, services, Spark and workers.
 *
 * @param appId the application the programs belong to
 * @param appSpec the application's specification (source of program names)
 * @return a mutable set of all program ids (unordered)
 */
private Set<ProgramId> getAllPrograms(ApplicationId appId, ApplicationSpecification appSpec) {
  Set<ProgramId> programs = new HashSet<>();
  programs.addAll(getProgramsWithType(appId, ProgramType.MAPREDUCE, appSpec.getMapReduce()));
  programs.addAll(getProgramsWithType(appId, ProgramType.WORKFLOW, appSpec.getWorkflows()));
  programs.addAll(getProgramsWithType(appId, ProgramType.SERVICE, appSpec.getServices()));
  programs.addAll(getProgramsWithType(appId, ProgramType.SPARK, appSpec.getSpark()));
  programs.addAll(getProgramsWithType(appId, ProgramType.WORKER, appSpec.getWorkers()));
  return programs;
}
/**
 * Gathers the ids of every program declared by the application across all
 * supported program types (flows, MapReduce, workflows, services, Spark, workers).
 *
 * @param appId the owning application
 * @param appSpec the application specification listing the programs
 * @return a mutable, unordered set of program ids
 */
private Set<ProgramId> getAllPrograms(ApplicationId appId, ApplicationSpecification appSpec) {
  Set<ProgramId> programs = new HashSet<>();
  programs.addAll(getProgramsWithType(appId, ProgramType.FLOW, appSpec.getFlows()));
  programs.addAll(getProgramsWithType(appId, ProgramType.MAPREDUCE, appSpec.getMapReduce()));
  programs.addAll(getProgramsWithType(appId, ProgramType.WORKFLOW, appSpec.getWorkflows()));
  programs.addAll(getProgramsWithType(appId, ProgramType.SERVICE, appSpec.getServices()));
  programs.addAll(getProgramsWithType(appId, ProgramType.SPARK, appSpec.getSpark()));
  programs.addAll(getProgramsWithType(appId, ProgramType.WORKER, appSpec.getWorkers()));
  return programs;
}
/**
 * Adds one property entry per program of each type into {@code properties}.
 *
 * <p>Reads the enclosing instance's {@code appSpec} field. Insertion order is
 * deliberate: {@code ImmutableMap} preserves builder insertion order, so the
 * resulting map iterates MAPREDUCE, SERVICE, SPARK, WORKER, WORKFLOW entries
 * in that sequence — do not reorder these calls casually.
 *
 * @param properties builder the per-type overload writes into
 */
private void addPrograms(ImmutableMap.Builder<String, String> properties) { addPrograms(ProgramType.MAPREDUCE, appSpec.getMapReduce().values(), properties); addPrograms(ProgramType.SERVICE, appSpec.getServices().values(), properties); addPrograms(ProgramType.SPARK, appSpec.getSpark().values(), properties); addPrograms(ProgramType.WORKER, appSpec.getWorkers().values(), properties); addPrograms(ProgramType.WORKFLOW, appSpec.getWorkflows().values(), properties); }
/**
 * Adds one property entry per program of each type (including flows) into
 * {@code properties}.
 *
 * <p>Reads the enclosing instance's {@code appSpec} field. Insertion order is
 * deliberate: {@code ImmutableMap} preserves builder insertion order, so the
 * built map iterates FLOW, MAPREDUCE, SERVICE, SPARK, WORKER, WORKFLOW entries
 * in that sequence — do not reorder these calls casually.
 *
 * @param properties builder the per-type overload writes into
 */
private void addPrograms(ImmutableMap.Builder<String, String> properties) { addPrograms(ProgramType.FLOW, appSpec.getFlows().values(), properties); addPrograms(ProgramType.MAPREDUCE, appSpec.getMapReduce().values(), properties); addPrograms(ProgramType.SERVICE, appSpec.getServices().values(), properties); addPrograms(ProgramType.SPARK, appSpec.getSpark().values(), properties); addPrograms(ProgramType.WORKER, appSpec.getWorkers().values(), properties); addPrograms(ProgramType.WORKFLOW, appSpec.getWorkflows().values(), properties); }
@Override protected void validateOptions(Program program, ProgramOptions options) { super.validateOptions(program, options); // Extract and verify parameters ApplicationSpecification appSpec = program.getApplicationSpecification(); Preconditions.checkNotNull(appSpec, "Missing application specification for %s", program.getId()); ProgramType processorType = program.getType(); Preconditions.checkNotNull(processorType, "Missing processor type for %s", program.getId()); Preconditions.checkArgument(processorType == ProgramType.SPARK, "Only SPARK process type is supported. Program type is %s for %s", processorType, program.getId()); SparkSpecification spec = appSpec.getSpark().get(program.getName()); Preconditions.checkNotNull(spec, "Missing SparkSpecification for %s", program.getId()); }
@Override protected void validateOptions(Program program, ProgramOptions options) { super.validateOptions(program, options); // Extract and verify parameters ApplicationSpecification appSpec = program.getApplicationSpecification(); Preconditions.checkNotNull(appSpec, "Missing application specification for %s", program.getId()); ProgramType processorType = program.getType(); Preconditions.checkNotNull(processorType, "Missing processor type for %s", program.getId()); Preconditions.checkArgument(processorType == ProgramType.SPARK, "Only SPARK process type is supported. Program type is %s for %s", processorType, program.getId()); SparkSpecification spec = appSpec.getSpark().get(program.getName()); Preconditions.checkNotNull(spec, "Missing SparkSpecification for %s", program.getId()); }
@Override protected void validateOptions(Program program, ProgramOptions options) { super.validateOptions(program, options); // Extract and verify parameters ApplicationSpecification appSpec = program.getApplicationSpecification(); Preconditions.checkNotNull(appSpec, "Missing application specification for %s", program.getId()); ProgramType processorType = program.getType(); Preconditions.checkNotNull(processorType, "Missing processor type for %s", program.getId()); Preconditions.checkArgument(processorType == ProgramType.SPARK, "Only SPARK process type is supported. Program type is %s for %s", processorType, program.getId()); SparkSpecification spec = appSpec.getSpark().get(program.getName()); Preconditions.checkNotNull(spec, "Missing SparkSpecification for %s", program.getId()); }
/**
 * Checks whether the application specification declares a program with the
 * id's name under the id's type.
 *
 * @param id the program id (type + name) to look up
 * @param appSpec the application specification to search
 * @return {@code true} if a program of that type and name exists
 * @throws IllegalArgumentException for program types this method does not handle
 */
private boolean programExists(ProgramId id, ApplicationSpecification appSpec) {
  String name = id.getProgram();
  switch (id.getType()) {
    case MAPREDUCE:
      return appSpec.getMapReduce().containsKey(name);
    case SERVICE:
      return appSpec.getServices().containsKey(name);
    case SPARK:
      return appSpec.getSpark().containsKey(name);
    case WORKER:
      return appSpec.getWorkers().containsKey(name);
    case WORKFLOW:
      return appSpec.getWorkflows().containsKey(name);
    default:
      throw new IllegalArgumentException("Unexpected ProgramType " + id.getType());
  }
}
/**
 * Checks whether the application specification declares a program with the
 * id's name under the id's type (flows included).
 *
 * @param id the program id (type + name) to look up
 * @param appSpec the application specification to search
 * @return {@code true} if a program of that type and name exists
 * @throws IllegalArgumentException for program types this method does not handle
 */
private boolean programExists(ProgramId id, ApplicationSpecification appSpec) {
  String programName = id.getProgram();
  switch (id.getType()) {
    case FLOW:
      return appSpec.getFlows().containsKey(programName);
    case MAPREDUCE:
      return appSpec.getMapReduce().containsKey(programName);
    case SERVICE:
      return appSpec.getServices().containsKey(programName);
    case SPARK:
      return appSpec.getSpark().containsKey(programName);
    case WORKER:
      return appSpec.getWorkers().containsKey(programName);
    case WORKFLOW:
      return appSpec.getWorkflows().containsKey(programName);
    default:
      throw new IllegalArgumentException("Unexpected ProgramType " + id.getType());
  }
}
@Override public void process(final ApplicationDeployable input) throws Exception { List<ProgramDescriptor> programDescriptors = new ArrayList<>(); final ApplicationSpecification appSpec = input.getSpecification(); // Now, we iterate through all ProgramSpecification and generate programs Iterable<ProgramSpecification> specifications = Iterables.concat( appSpec.getMapReduce().values(), appSpec.getFlows().values(), appSpec.getWorkflows().values(), appSpec.getServices().values(), appSpec.getSpark().values(), appSpec.getWorkers().values() ); for (ProgramSpecification spec: specifications) { ProgramType type = ProgramTypes.fromSpecification(spec); ProgramId programId = input.getApplicationId().program(type, spec.getName()); programDescriptors.add(new ProgramDescriptor(programId, appSpec)); } emit(new ApplicationWithPrograms(input, programDescriptors)); } }
@Override public void process(final ApplicationDeployable input) throws Exception { List<ProgramDescriptor> programDescriptors = new ArrayList<>(); final ApplicationSpecification appSpec = input.getSpecification(); // Now, we iterate through all ProgramSpecification and generate programs Iterable<ProgramSpecification> specifications = Iterables.concat( appSpec.getMapReduce().values(), appSpec.getWorkflows().values(), appSpec.getServices().values(), appSpec.getSpark().values(), appSpec.getWorkers().values() ); for (ProgramSpecification spec: specifications) { ProgramType type = ProgramTypes.fromSpecification(spec); ProgramId programId = input.getApplicationId().program(type, spec.getName()); programDescriptors.add(new ProgramDescriptor(programId, appSpec)); } emit(new ApplicationWithPrograms(input, programDescriptors)); } }
@Override public void process(ApplicationWithPrograms input) { // use current time as creation time for app and all programs creationTime = String.valueOf(System.currentTimeMillis()); // add system metadata for apps ApplicationId appId = input.getApplicationId(); ApplicationSpecification appSpec = input.getSpecification(); new AppSystemMetadataWriter(metadataPublisher, appId, appSpec, creationTime).write(); // add system metadata for programs writeProgramSystemMetadata(appId, ProgramType.MAPREDUCE, appSpec.getMapReduce().values()); writeProgramSystemMetadata(appId, ProgramType.SERVICE, appSpec.getServices().values()); writeProgramSystemMetadata(appId, ProgramType.SPARK, appSpec.getSpark().values()); writeProgramSystemMetadata(appId, ProgramType.WORKER, appSpec.getWorkers().values()); writeProgramSystemMetadata(appId, ProgramType.WORKFLOW, appSpec.getWorkflows().values()); // Emit input to the next stage emit(input); }
@Override public void process(ApplicationWithPrograms input) throws Exception { // add system metadata for apps ApplicationId appId = input.getApplicationId(); ApplicationSpecification appSpec = input.getSpecification(); // only update creation time if this is a new app Map<String, String> properties = metadataStore.getProperties(MetadataScope.SYSTEM, appId.toMetadataEntity()); SystemMetadataWriter appSystemMetadataWriter = new AppSystemMetadataWriter(metadataStore, appId, appSpec, !properties.isEmpty()); appSystemMetadataWriter.write(); // add system metadata for programs writeProgramSystemMetadata(appId, ProgramType.FLOW, appSpec.getFlows().values()); writeProgramSystemMetadata(appId, ProgramType.MAPREDUCE, appSpec.getMapReduce().values()); writeProgramSystemMetadata(appId, ProgramType.SERVICE, appSpec.getServices().values()); writeProgramSystemMetadata(appId, ProgramType.SPARK, appSpec.getSpark().values()); writeProgramSystemMetadata(appId, ProgramType.WORKER, appSpec.getWorkers().values()); writeProgramSystemMetadata(appId, ProgramType.WORKFLOW, appSpec.getWorkflows().values()); // Emit input to the next stage emit(input); }