/**
 * Builds an alias-scoped copy of the given JobContext.
 *
 * The returned context is a fresh JobContext sharing the job id, whose
 * configuration additionally contains the OutputFormat settings previously
 * stored under the alias key.
 *
 * @param alias the name given to the OutputFormat configuration
 * @param context the actual JobContext
 * @return a copy of the JobContext with the alias configuration populated
 */
public static JobContext getJobContext(String alias, JobContext context) {
  // Serialized per-alias configuration, written earlier under the alias key.
  String storedAliasConf = context.getConfiguration().get(getAliasConfName(alias));
  JobContext aliasJobContext = ShimLoader.getHadoopShims().getHCatShim()
      .createJobContext(context.getConfiguration(), context.getJobID());
  // Overlay the alias-specific entries onto the copy's configuration.
  addToConfig(storedAliasConf, aliasJobContext.getConfiguration());
  return aliasJobContext;
}
@Override public void checkOutputSpecs(JobContext context) throws IOException, InterruptedException { for (String alias : getOutputFormatAliases(context)) { LOGGER.debug("Calling checkOutputSpecs for alias: " + alias); JobContext aliasContext = getJobContext(alias, context); OutputFormat<?, ?> outputFormat = getOutputFormatInstance(aliasContext); outputFormat.checkOutputSpecs(aliasContext); // Copy credentials and any new config added back to JobContext context.getCredentials().addAll(aliasContext.getCredentials()); setAliasConf(alias, context, aliasContext); } }
if (jobValue == null || !jobValue.equals(value)) { if (configsToMerge.containsKey(key)) { String mergedValue = getMergedConfValue(jobValue, value, configsToMerge.get(key)); userConf.set(key, mergedValue); } else { userConf.set(getAliasConfName(alias), builder.toString());
/**
 * Creates one wrapped OutputCommitter per configured alias, preserving the
 * alias declaration order.
 *
 * @param context the TaskAttemptContext of the running task
 * @throws IOException if an underlying committer cannot be created
 * @throws InterruptedException if committer creation is interrupted
 */
public MultiOutputCommitter(TaskAttemptContext context) throws IOException, InterruptedException {
  // LinkedHashMap keeps committers in alias declaration order.
  outputCommitters = new LinkedHashMap<String, MultiOutputFormat.BaseOutputCommitterContainer>();
  for (String alias : getOutputFormatAliases(context)) {
    LOGGER.info("Creating output committer for alias: " + alias);
    TaskAttemptContext aliasContext = getTaskAttemptContext(alias, context);
    OutputCommitter committer =
        getOutputFormatInstance(aliasContext).getOutputCommitter(aliasContext);
    outputCommitters.put(alias, new BaseOutputCommitterContainer(committer, aliasContext));
  }
}
/**
 * Creates one wrapped RecordWriter per configured alias, preserving the
 * alias declaration order, and ensures each alias's output directory exists
 * before the underlying writer is opened.
 *
 * @param context the TaskAttemptContext of the running task
 * @throws IOException if a writer or output directory cannot be created
 * @throws InterruptedException if writer creation is interrupted
 */
public MultiRecordWriter(TaskAttemptContext context) throws IOException, InterruptedException {
  // LinkedHashMap keeps writers in alias declaration order.
  baseRecordWriters = new LinkedHashMap<String, BaseRecordWriterContainer>();
  for (String alias : getOutputFormatAliases(context)) {
    LOGGER.info("Creating record writer for alias: " + alias);
    TaskAttemptContext aliasContext = getTaskAttemptContext(alias, context);
    Configuration aliasConf = aliasContext.getConfiguration();
    // Pre-create the alias's output directory when one is configured, so the
    // underlying OutputFormat does not fail on a missing parent path.
    String configuredOutDir = aliasConf.get("mapred.output.dir");
    if (configuredOutDir != null) {
      Path outPath = new Path(configuredOutDir);
      FileSystem fs = outPath.getFileSystem(aliasConf);
      if (!fs.exists(outPath)) {
        fs.mkdirs(outPath);
      }
    }
    RecordWriter<?, ?> writer =
        getOutputFormatInstance(aliasContext).getRecordWriter(aliasContext);
    baseRecordWriters.put(alias, new BaseRecordWriterContainer(writer, aliasContext));
  }
}
@Override public void checkOutputSpecs(JobContext context) throws IOException, InterruptedException { for (String alias : getOutputFormatAliases(context)) { LOGGER.debug("Calling checkOutputSpecs for alias: " + alias); JobContext aliasContext = getJobContext(alias, context); OutputFormat<?, ?> outputFormat = getOutputFormatInstance(aliasContext); outputFormat.checkOutputSpecs(aliasContext); // Copy credentials and any new config added back to JobContext context.getCredentials().addAll(aliasContext.getCredentials()); setAliasConf(alias, context, aliasContext); } }
/**
 * Builds an alias-scoped copy of the given TaskAttemptContext.
 *
 * The returned context is a fresh TaskAttemptContext sharing the task
 * attempt id, whose configuration additionally contains the OutputFormat
 * settings previously stored under the alias key.
 *
 * @param alias the name given to the OutputFormat configuration
 * @param context the Mapper or Reducer Context
 * @return a copy of the TaskAttemptContext with the alias configuration populated
 */
public static TaskAttemptContext getTaskAttemptContext(String alias, TaskAttemptContext context) {
  // Serialized per-alias configuration, written earlier under the alias key.
  String storedAliasConf = context.getConfiguration().get(getAliasConfName(alias));
  TaskAttemptContext aliasTaskContext = ShimLoader.getHadoopShims().getHCatShim()
      .createTaskAttemptContext(context.getConfiguration(), context.getTaskAttemptID());
  // Overlay the alias-specific entries onto the copy's configuration.
  addToConfig(storedAliasConf, aliasTaskContext.getConfiguration());
  return aliasTaskContext;
}
if (jobValue == null || !jobValue.equals(value)) { if (configsToMerge.containsKey(key)) { String mergedValue = getMergedConfValue(jobValue, value, configsToMerge.get(key)); userConf.set(key, mergedValue); } else { userConf.set(getAliasConfName(alias), builder.toString());
/**
 * Creates one wrapped OutputCommitter per configured alias, preserving the
 * alias declaration order.
 *
 * @param context the TaskAttemptContext of the running task
 * @throws IOException if an underlying committer cannot be created
 * @throws InterruptedException if committer creation is interrupted
 */
public MultiOutputCommitter(TaskAttemptContext context) throws IOException, InterruptedException {
  // LinkedHashMap keeps committers in alias declaration order.
  outputCommitters = new LinkedHashMap<String, MultiOutputFormat.BaseOutputCommitterContainer>();
  for (String alias : getOutputFormatAliases(context)) {
    LOGGER.info("Creating output committer for alias: " + alias);
    TaskAttemptContext aliasContext = getTaskAttemptContext(alias, context);
    OutputCommitter committer =
        getOutputFormatInstance(aliasContext).getOutputCommitter(aliasContext);
    outputCommitters.put(alias, new BaseOutputCommitterContainer(committer, aliasContext));
  }
}
@Override public void checkOutputSpecs(JobContext context) throws IOException, InterruptedException { for (String alias : getOutputFormatAliases(context)) { LOGGER.debug("Calling checkOutputSpecs for alias: " + alias); JobContext aliasContext = getJobContext(alias, context); OutputFormat<?, ?> outputFormat = getOutputFormatInstance(aliasContext); outputFormat.checkOutputSpecs(aliasContext); // Copy credentials and any new config added back to JobContext context.getCredentials().addAll(aliasContext.getCredentials()); setAliasConf(alias, context, aliasContext); } }
/**
 * Builds an alias-scoped copy of the given JobContext.
 *
 * The returned context is a fresh JobContext sharing the job id, whose
 * configuration additionally contains the OutputFormat settings previously
 * stored under the alias key.
 *
 * @param alias the name given to the OutputFormat configuration
 * @param context the actual JobContext
 * @return a copy of the JobContext with the alias configuration populated
 */
public static JobContext getJobContext(String alias, JobContext context) {
  // Serialized per-alias configuration, written earlier under the alias key.
  String storedAliasConf = context.getConfiguration().get(getAliasConfName(alias));
  JobContext aliasJobContext = ShimLoader.getHadoopShims().getHCatShim()
      .createJobContext(context.getConfiguration(), context.getJobID());
  // Overlay the alias-specific entries onto the copy's configuration.
  addToConfig(storedAliasConf, aliasJobContext.getConfiguration());
  return aliasJobContext;
}
if (jobValue == null || !jobValue.equals(value)) { if (configsToMerge.containsKey(key)) { String mergedValue = getMergedConfValue(jobValue, value, configsToMerge.get(key)); userConf.set(key, mergedValue); } else { userConf.set(getAliasConfName(alias), builder.toString());
/**
 * Creates one wrapped OutputCommitter per configured alias, preserving the
 * alias declaration order.
 *
 * @param context the TaskAttemptContext of the running task
 * @throws IOException if an underlying committer cannot be created
 * @throws InterruptedException if committer creation is interrupted
 */
public MultiOutputCommitter(TaskAttemptContext context) throws IOException, InterruptedException {
  // LinkedHashMap keeps committers in alias declaration order.
  outputCommitters = new LinkedHashMap<String, MultiOutputFormat.BaseOutputCommitterContainer>();
  for (String alias : getOutputFormatAliases(context)) {
    LOGGER.info("Creating output committer for alias: " + alias);
    TaskAttemptContext aliasContext = getTaskAttemptContext(alias, context);
    OutputCommitter committer =
        getOutputFormatInstance(aliasContext).getOutputCommitter(aliasContext);
    outputCommitters.put(alias, new BaseOutputCommitterContainer(committer, aliasContext));
  }
}
@Override public void checkOutputSpecs(JobContext context) throws IOException, InterruptedException { for (String alias : getOutputFormatAliases(context)) { LOGGER.debug("Calling checkOutputSpecs for alias: " + alias); JobContext aliasContext = getJobContext(alias, context); OutputFormat<?, ?> outputFormat = getOutputFormatInstance(aliasContext); outputFormat.checkOutputSpecs(aliasContext); // Copy credentials and any new config added back to JobContext context.getCredentials().addAll(aliasContext.getCredentials()); setAliasConf(alias, context, aliasContext); } }
/**
 * Builds an alias-scoped copy of the given TaskAttemptContext.
 *
 * The returned context is a fresh TaskAttemptContext sharing the task
 * attempt id, whose configuration additionally contains the OutputFormat
 * settings previously stored under the alias key.
 *
 * @param alias the name given to the OutputFormat configuration
 * @param context the Mapper or Reducer Context
 * @return a copy of the TaskAttemptContext with the alias configuration populated
 */
public static TaskAttemptContext getTaskAttemptContext(String alias, TaskAttemptContext context) {
  // Serialized per-alias configuration, written earlier under the alias key.
  String storedAliasConf = context.getConfiguration().get(getAliasConfName(alias));
  TaskAttemptContext aliasTaskContext = ShimLoader.getHadoopShims().getHCatShim()
      .createTaskAttemptContext(context.getConfiguration(), context.getTaskAttemptID());
  // Overlay the alias-specific entries onto the copy's configuration.
  addToConfig(storedAliasConf, aliasTaskContext.getConfiguration());
  return aliasTaskContext;
}
if (jobValue == null || !jobValue.equals(value)) { if (configsToMerge.containsKey(key)) { String mergedValue = getMergedConfValue(jobValue, value, configsToMerge.get(key)); userConf.set(key, mergedValue); } else { userConf.set(getAliasConfName(alias), builder.toString());
/**
 * Creates one wrapped OutputCommitter per configured alias, preserving the
 * alias declaration order.
 *
 * @param context the TaskAttemptContext of the running task
 * @throws IOException if an underlying committer cannot be created
 * @throws InterruptedException if committer creation is interrupted
 */
public MultiOutputCommitter(TaskAttemptContext context) throws IOException, InterruptedException {
  // LinkedHashMap keeps committers in alias declaration order.
  outputCommitters = new LinkedHashMap<String, MultiOutputFormat.BaseOutputCommitterContainer>();
  for (String alias : getOutputFormatAliases(context)) {
    LOGGER.info("Creating output committer for alias: " + alias);
    TaskAttemptContext aliasContext = getTaskAttemptContext(alias, context);
    OutputCommitter committer =
        getOutputFormatInstance(aliasContext).getOutputCommitter(aliasContext);
    outputCommitters.put(alias, new BaseOutputCommitterContainer(committer, aliasContext));
  }
}
@Override public void checkOutputSpecs(JobContext context) throws IOException, InterruptedException { for (String alias : getOutputFormatAliases(context)) { LOGGER.debug("Calling checkOutputSpecs for alias: " + alias); JobContext aliasContext = getJobContext(alias, context); OutputFormat<?, ?> outputFormat = getOutputFormatInstance(aliasContext); outputFormat.checkOutputSpecs(aliasContext); // Copy credentials and any new config added back to JobContext context.getCredentials().addAll(aliasContext.getCredentials()); setAliasConf(alias, context, aliasContext); } }
/**
 * Builds an alias-scoped copy of the given TaskAttemptContext.
 *
 * The returned context is a fresh TaskAttemptContext sharing the task
 * attempt id, whose configuration additionally contains the OutputFormat
 * settings previously stored under the alias key.
 *
 * @param alias the name given to the OutputFormat configuration
 * @param context the Mapper or Reducer Context
 * @return a copy of the TaskAttemptContext with the alias configuration populated
 */
public static TaskAttemptContext getTaskAttemptContext(String alias, TaskAttemptContext context) {
  // Serialized per-alias configuration, written earlier under the alias key.
  String storedAliasConf = context.getConfiguration().get(getAliasConfName(alias));
  TaskAttemptContext aliasTaskContext = ShimLoader.getHadoopShims().getHCatShim()
      .createTaskAttemptContext(context.getConfiguration(), context.getTaskAttemptID());
  // Overlay the alias-specific entries onto the copy's configuration.
  addToConfig(storedAliasConf, aliasTaskContext.getConfiguration());
  return aliasTaskContext;
}
if (jobValue == null || !jobValue.equals(value)) { if (configsToMerge.containsKey(key)) { String mergedValue = getMergedConfValue(jobValue, value, configsToMerge.get(key)); userConf.set(key, mergedValue); } else { userConf.set(getAliasConfName(alias), builder.toString());