/** Adds the given {@link Input} without binding it to a specific mapper class; delegates to {@link #addInput(Input, Class)} with a {@code null} mapper. */
@Override public void addInput(Input input) { addInput(input, null); }
/**
 * Calls the destroy method of {@link ProgramLifecycle} under the appropriate transaction
 * control, then best-effort emits any field lineage operations collected during the run.
 */
private void destroy() {
  TransactionControl defaultTxControl = context.getDefaultTxControl();
  TransactionControl txControl;
  if (mapReduce instanceof ProgramLifecycle) {
    // The program overrides destroy(); honor any transaction policy declared on that method.
    txControl = Transactions.getTransactionControl(defaultTxControl, MapReduce.class,
                                                   mapReduce, "destroy");
  } else {
    txControl = defaultTxControl;
  }
  context.destroyProgram(programLifecycle, txControl, false);

  if (!emitFieldLineage()) {
    return;
  }
  try {
    // Lineage emission is best-effort: failures are logged, never propagated to the caller.
    FieldLineageInfo info = new FieldLineageInfo(context.getFieldLineageOperations());
    fieldLineageWriter.write(mapReduceRunId, info);
  } catch (Throwable t) {
    LOG.warn("Failed to emit the field lineage operations for MapReduce {}", mapReduceRunId, t);
  }
}
context.getProgram().getClassLoader(), context.getApplicationSpecification().getPlugins(), context.getPluginInstantiator()); cleanupTask = createCleanupTask(classLoader, cleanupTask); context.setMapReduceClassLoader(classLoader); context.setJob(job); mapredConf.setClassLoader(context.getProgramInvocationClassLoader()); TaskType.MAP.configure(mapredConf, cConf, context.getMapperRuntimeArguments(), context.getMapperResources()); TaskType.REDUCE.configure(mapredConf, cConf, context.getReducerRuntimeArguments(), context.getReducerResources()); ClassLoader oldClassLoader = ClassLoaders.setContextClassLoader(context.getProgramInvocationClassLoader()); try { job.submit();
NamespaceQueryAdmin namespaceQueryAdmin) { super(program, programOptions, cConf, spec.getDataSets(), dsFramework, txClient, discoveryServiceClient, false, metricsCollectionService, createMetricsTags(workflowProgramInfo), secureStore, secureStoreManager, messagingService, pluginInstantiator, metadataReader, metadataPublisher, namespaceQueryAdmin); this.loggingContext = createLoggingContext(program.getId(), getRunId(), workflowProgramInfo); this.spec = spec; this.mapperResources = SystemArguments.getResources(getMapperRuntimeArguments(), spec.getMapperResources()); this.reducerResources = SystemArguments.getResources(getReducerRuntimeArguments(), spec.getReducerResources()); this.pluginArchive = pluginArchive; this.resourcesToLocalize = new HashMap<>(); addInput(Input.ofDataset(spec.getInputDataSet())); addOutput(Output.ofDataset(spec.getOutputDataSet()));
/**
 * Updates the {@link Configuration} of this class with the given parameters.
 *
 * @param context the context for the MapReduce program
 * @param conf the CDAP configuration
 * @param programJarURI The URI of the program JAR
 * @param localizedUserResources the localized resources for the MapReduce program
 */
public void set(BasicMapReduceContext context, CConfiguration conf, URI programJarURI,
                Map<String, String> localizedUserResources) {
  // Values derived from the runtime context.
  ApplicationSpecification appSpec = context.getApplicationSpecification();
  setProgramOptions(context.getProgramOptions());
  setProgramId(context.getProgram().getId());
  setApplicationSpecification(appSpec);
  setWorkflowProgramInfo(context.getWorkflowInfo());
  setPlugins(appSpec.getPlugins());
  setOutputs(context.getOutputs());
  // Values supplied directly by the caller.
  setProgramJarURI(programJarURI);
  setConf(conf);
  setLocalizedResources(localizedUserResources);
}
context.setState(new ProgramState(ProgramStatus.INITIALIZING, null)); TransactionControl defaultTxControl = context.getDefaultTxControl(); TransactionControl txControl = mapReduce instanceof AbstractMapReduce ? Transactions.getTransactionControl(defaultTxControl, AbstractMapReduce.class, : defaultTxControl; context.initializeProgram(programLifecycle, txControl, false); context.setState(new ProgramState(ProgramStatus.RUNNING, null)); ClassLoader oldClassLoader = ClassLoaders.setContextClassLoader(context.getProgramInvocationClassLoader()); try {
new BasicMapReduceContext(program, options, cConf, spec, workflowInfo, discoveryServiceClient, metricsCollectionService, txSystemClient, programDatasetFramework, getPluginArchive(options), pluginInstantiator, secureStore, secureStoreManager, new PropertyFieldSetter(context.getSpecification().getProperties()), new MetricsFieldSetter(context.getMetrics()), new DataSetFieldSetter(context)); LoggingContextAccessor.setLoggingContext(context.getLoggingContext());
@Override public void addInput(Input input, @Nullable Class<?> mapperCls) { if (input.getNamespace() != null && input.getNamespace().equals(NamespaceId.SYSTEM.getNamespace()) && !getProgram().getNamespaceId().equals(NamespaceId.SYSTEM.getNamespace())) { // trying to access system namespace from a program outside system namespace is not allowed throw new IllegalArgumentException(String.format("Accessing Input %s in system namespace " + "is not allowed from the namespace %s", input.getName(), getProgram().getNamespaceId())); } if (input instanceof Input.DatasetInput) { Input.DatasetInput datasetInput = (Input.DatasetInput) input; Input.InputFormatProviderInput createdInput = createInput(datasetInput); addInput(createdInput.getAlias(), createdInput.getInputFormatProvider(), mapperCls); } else if (input instanceof Input.InputFormatProviderInput) { addInput(input.getAlias(), ((Input.InputFormatProviderInput) input).getInputFormatProvider(), mapperCls); } else { // shouldn't happen unless user defines their own Input class throw new IllegalArgumentException(String.format("Input %s has unknown input class %s", input.getName(), input.getClass().getCanonicalName())); } }
private Input.InputFormatProviderInput createInput(Input.DatasetInput datasetInput) { String datasetName = datasetInput.getName(); Map<String, String> datasetArgs = datasetInput.getArguments(); // keep track of the original alias to set it on the created Input before returning it String originalAlias = datasetInput.getAlias(); Dataset dataset; if (datasetInput.getNamespace() == null) { dataset = getDataset(datasetName, datasetArgs, AccessType.READ); } else { dataset = getDataset(datasetInput.getNamespace(), datasetName, datasetArgs, AccessType.READ); } DatasetInputFormatProvider datasetInputFormatProvider = new DatasetInputFormatProvider(datasetInput.getNamespace(), datasetName, datasetArgs, dataset, datasetInput.getSplits(), MapReduceBatchReadableInputFormat.class); return (Input.InputFormatProviderInput) Input.of(datasetName, datasetInputFormatProvider).alias(originalAlias); }
context.getProgram().getClassLoader(), context.getApplicationSpecification().getPlugins(), context.getPluginInstantiator()); cleanupTask = createCleanupTask(classLoader, cleanupTask); context.setMapReduceClassLoader(classLoader); context.setJob(job); mapredConf.setClassLoader(context.getProgramInvocationClassLoader()); TaskType.MAP.configure(mapredConf, cConf, context.getMapperRuntimeArguments(), context.getMapperResources()); TaskType.REDUCE.configure(mapredConf, cConf, context.getReducerRuntimeArguments(), context.getReducerResources()); ClassLoader oldClassLoader = ClassLoaders.setContextClassLoader(context.getProgramInvocationClassLoader()); try { job.submit();
MetadataPublisher metadataPublisher) { super(program, programOptions, cConf, spec.getDataSets(), dsFramework, txClient, discoveryServiceClient, false, metricsCollectionService, createMetricsTags(workflowProgramInfo), secureStore, secureStoreManager, messagingService, pluginInstantiator, metadataReader, metadataPublisher); this.loggingContext = createLoggingContext(program.getId(), getRunId(), workflowProgramInfo); this.spec = spec; this.mapperResources = SystemArguments.getResources(getMapperRuntimeArguments(), spec.getMapperResources()); this.reducerResources = SystemArguments.getResources(getReducerRuntimeArguments(), spec.getReducerResources()); this.streamAdmin = streamAdmin; this.pluginArchive = pluginArchive; addInput(Input.ofDataset(spec.getInputDataSet())); addOutput(Output.ofDataset(spec.getOutputDataSet()));
/**
 * Updates the {@link Configuration} of this class with the given parameters.
 *
 * @param context the context for the MapReduce program
 * @param conf the CDAP configuration
 * @param programJarURI The URI of the program JAR
 * @param localizedUserResources the localized resources for the MapReduce program
 */
public void set(BasicMapReduceContext context, CConfiguration conf, URI programJarURI,
                Map<String, String> localizedUserResources) {
  // Context-derived values.
  setProgramOptions(context.getProgramOptions());
  setProgramId(context.getProgram().getId());
  ApplicationSpecification appSpec = context.getApplicationSpecification();
  setApplicationSpecification(appSpec);
  setPlugins(appSpec.getPlugins());
  setWorkflowProgramInfo(context.getWorkflowInfo());
  setOutputs(context.getOutputs());
  // Caller-supplied values.
  setProgramJarURI(programJarURI);
  setConf(conf);
  setLocalizedResources(localizedUserResources);
}
context.setState(new ProgramState(ProgramStatus.INITIALIZING, null)); TransactionControl defaultTxControl = context.getDefaultTxControl(); TransactionControl txControl = mapReduce instanceof AbstractMapReduce ? Transactions.getTransactionControl(defaultTxControl, AbstractMapReduce.class, : defaultTxControl; context.initializeProgram(programLifecycle, txControl, false); context.setState(new ProgramState(ProgramStatus.RUNNING, null)); ClassLoader oldClassLoader = ClassLoaders.setContextClassLoader(context.getProgramInvocationClassLoader()); try {
new BasicMapReduceContext(program, options, cConf, spec, workflowInfo, discoveryServiceClient, metricsCollectionService, txSystemClient, programDatasetFramework, streamAdmin, getPluginArchive(options), pluginInstantiator, secureStore, secureStoreManager, new PropertyFieldSetter(context.getSpecification().getProperties()), new MetricsFieldSetter(context.getMetrics()), new DataSetFieldSetter(context)); LoggingContextAccessor.setLoggingContext(context.getLoggingContext());
@Override public void addInput(Input input, @Nullable Class<?> mapperCls) { if (input.getNamespace() != null && input.getNamespace().equals(NamespaceId.SYSTEM.getNamespace()) && !getProgram().getNamespaceId().equals(NamespaceId.SYSTEM.getNamespace())) { // trying to access system namespace from a program outside system namespace is not allowed throw new IllegalArgumentException(String.format("Accessing Input %s in system namespace " + "is not allowed from the namespace %s", input.getName(), getProgram().getNamespaceId())); } if (input instanceof Input.DatasetInput) { Input.DatasetInput datasetInput = (Input.DatasetInput) input; Input.InputFormatProviderInput createdInput = createInput(datasetInput); addInput(createdInput.getAlias(), createdInput.getInputFormatProvider(), mapperCls); } else if (input instanceof Input.StreamInput) { Input.StreamInput streamInput = (Input.StreamInput) input; String namespace = streamInput.getNamespace(); if (namespace == null) { namespace = getProgram().getNamespaceId(); } addInput(input.getAlias(), new StreamInputFormatProvider(new NamespaceId(namespace), streamInput, streamAdmin), mapperCls); } else if (input instanceof Input.InputFormatProviderInput) { addInput(input.getAlias(), ((Input.InputFormatProviderInput) input).getInputFormatProvider(), mapperCls); } else { // shouldn't happen unless user defines their own Input class throw new IllegalArgumentException(String.format("Input %s has unknown input class %s", input.getName(), input.getClass().getCanonicalName())); } }
private Input.InputFormatProviderInput createInput(Input.DatasetInput datasetInput) { String datasetName = datasetInput.getName(); Map<String, String> datasetArgs = datasetInput.getArguments(); // keep track of the original alias to set it on the created Input before returning it String originalAlias = datasetInput.getAlias(); Dataset dataset; if (datasetInput.getNamespace() == null) { dataset = getDataset(datasetName, datasetArgs, AccessType.READ); } else { dataset = getDataset(datasetInput.getNamespace(), datasetName, datasetArgs, AccessType.READ); } DatasetInputFormatProvider datasetInputFormatProvider = new DatasetInputFormatProvider(datasetInput.getNamespace(), datasetName, datasetArgs, dataset, datasetInput.getSplits(), MapReduceBatchReadableInputFormat.class); return (Input.InputFormatProviderInput) Input.of(datasetName, datasetInputFormatProvider).alias(originalAlias); }
/**
 * Calls the destroy method of {@link ProgramLifecycle} with the applicable transaction
 * control, then attempts (best-effort) to emit the run's field lineage operations.
 */
private void destroy() {
  TransactionControl defaultTxControl = context.getDefaultTxControl();
  // Honor a transaction policy on a user-defined destroy(); otherwise use the default control.
  TransactionControl txControl = defaultTxControl;
  if (mapReduce instanceof ProgramLifecycle) {
    txControl = Transactions.getTransactionControl(defaultTxControl, MapReduce.class,
                                                   mapReduce, "destroy");
  }
  context.destroyProgram(programLifecycle, txControl, false);

  if (emitFieldLineage()) {
    try {
      // Lineage emission must never fail the run; swallow and log any error.
      fieldLineageWriter.write(mapReduceRunId,
                               new FieldLineageInfo(context.getFieldLineageOperations()));
    } catch (Throwable t) {
      LOG.warn("Failed to emit the field lineage operations for MapReduce {}", mapReduceRunId, t);
    }
  }
}
/** Adds the given {@link Input} without binding it to a specific mapper class; delegates to {@link #addInput(Input, Class)} with a {@code null} mapper. */
@Override public void addInput(Input input) { addInput(input, null); }