@SuppressWarnings("deprecation") private TaskAttemptContext getContext(String nameOutput) throws IOException { TaskAttemptContext taskContext = taskContexts.get(nameOutput); if (taskContext != null) { return taskContext; } // The following trick leverages the instantiation of a record writer via // the job thus supporting arbitrary output formats. Job job = new Job(context.getConfiguration()); job.setOutputFormatClass(getNamedOutputFormatClass(context, nameOutput)); Schema keySchema=null,valSchema=null; if (job.getConfiguration().get(MO_PREFIX + nameOutput + ".keyschema",null) != null) keySchema = Schema.parse(job.getConfiguration().get( MO_PREFIX + nameOutput + ".keyschema")); if (job.getConfiguration().get(MO_PREFIX + nameOutput + ".valueschema", null) != null) valSchema = Schema.parse(job.getConfiguration().get( MO_PREFIX + nameOutput + ".valueschema")); setSchema(job, keySchema, valSchema); taskContext = createTaskAttemptContext( job.getConfiguration(), context.getTaskAttemptID()); taskContexts.put(nameOutput, taskContext); return taskContext; }
/**
 * Write key value to an output file name.
 *
 * Gets the record writer from job's output format. Job's output format should
 * be a FileOutputFormat.
 *
 * @param key the key
 * @param value the value
 * @param keySchema keySchema to use
 * @param valSchema ValueSchema to use
 * @param baseOutputPath base-output path to write the record to. Note: Framework will
 *          generate unique filename for the baseOutputPath
 */
@SuppressWarnings("unchecked")
public void write(Object key, Object value, Schema keySchema,
    Schema valSchema, String baseOutputPath)
    throws IOException, InterruptedException {
  checkBaseOutputPath(baseOutputPath);
  // Clone the configuration via a throwaway Job so the schemas can be applied
  // without touching the live task configuration.
  Job schemaJob = new Job(context.getConfiguration());
  setSchema(schemaJob, keySchema, valSchema);
  TaskAttemptContext writeContext = createTaskAttemptContext(
      schemaJob.getConfiguration(), context.getTaskAttemptID());
  RecordWriter writer = getRecordWriter(writeContext, baseOutputPath);
  writer.write(key, value);
}
/**
 * Returns the active configuration: the task context's configuration when one
 * is attached, otherwise the test configuration, otherwise {@code null}.
 */
protected Configuration getConfiguration() {
  if (context != null) {
    return context.getConfiguration();
  }
  // testConf is either the fallback or null, matching the original chain.
  return testConf;
}
/**
 * Creates and initializes multiple outputs support,
 * it should be instantiated in the Mapper/Reducer setup method.
 *
 * @param context the TaskInputOutputContext object
 */
public CrunchOutputs(TaskInputOutputContext<?, ?, K, V> context) {
  // Delegate to the Configuration-based constructor for shared setup.
  this(context.getConfiguration());
  // Keep the live context so records can later be written through it.
  this.baseContext = context;
}
/**
 * Returns the configuration in precedence order: the explicit one when set,
 * else the context's configuration, else {@code null}.
 */
protected Configuration getConfiguration() {
  if (conf != null) {
    return conf;
  }
  return (context != null) ? context.getConfiguration() : null;
}
/** Returns {@code true} when the debug flag is enabled in the task configuration. */
public boolean isDebugRun() {
  return taskContext.getConfiguration().getBoolean(RuntimeParameters.DEBUG, false);
}
/** Returns {@code true} when the debug flag is enabled in the task configuration. */
public boolean isDebugRun() {
  return taskContext.getConfiguration().getBoolean(RuntimeParameters.DEBUG, false);
}
/** Returns {@code true} when the debug flag is enabled in the task configuration. */
public boolean isDebugRun() {
  return taskContext.getConfiguration().getBoolean(RuntimeParameters.DEBUG, false);
}
/**
 * Creates a mock multiple-outputs wrapper around the given task context,
 * snapshotting its configuration and starting with empty collector maps.
 */
public MockMultipleOutputs(TaskInputOutputContext context) {
  super(context);
  // Copy-construct so later mutations to the live configuration are not seen.
  configuration = new Configuration(context.getConfiguration());
  namedOutputCollectorMap = new HashMap<>();
  pathCollectorMap = new HashMap<>();
}
/**
 * Creates a metrics store that forwards writes through the given context
 * writer into the given task context.
 *
 * @param contextWriter the writer used to emit metrics
 * @param context the task context supplying the configuration
 */
public PassThroughMetricsStore(ContextWriter<OK,OV> contextWriter,
    TaskInputOutputContext<?,?,OK,OV> context) {
  this.contextWriter = contextWriter;
  this.context = context;
  // The destination table name is resolved once from the job configuration.
  this.table = new Text(MetricsConfiguration.getTable(context.getConfiguration()));
}
/**
 * Constructs a new implementation of {@link FijiContext}.
 *
 * @param context is the Hadoop {@link TaskInputOutputContext} that will back the new
 *     {@link FijiContext}
 * @throws IOException on I/O error.
 */
protected InternalFijiContext(TaskInputOutputContext context) throws IOException {
  mHadoopContext = context;
  // Build the key-value store reader factory from the job configuration; this
  // may perform I/O (hence the throws clause).
  mKeyValueStoreFactory = KeyValueStoreReaderFactory.create(context.getConfiguration());
}
/**
 * Builds the sink map declared in the job configuration: one entry per name
 * listed under {@code K_NAMES}, each initially mapped to {@code null} until a
 * real output is attached.
 *
 * @param context the task context whose configuration lists the sink names
 * @return a mutable map keyed by sink name with all-null values
 */
private static Map<String, ResultOutput<?>> prepareSinks(TaskInputOutputContext<?, ?, ?, ?> context) {
  assert context != null;
  Configuration conf = context.getConfiguration();
  Map<String, ResultOutput<?>> sinks = new HashMap<>();
  for (String sinkName : conf.getStringCollection(K_NAMES)) {
    sinks.put(sinkName, null);
  }
  return sinks;
}
/**
 * Reads HCatalog records for this source's bundle, using the task context's
 * configuration to drive the underlying iterable.
 *
 * @param context the task context supplying the configuration
 * @return a lazily-evaluated iterable over the bundle's records
 * @throws IOException declared by the interface; construction itself is lazy
 */
@Override
public Iterable<HCatRecord> read(TaskInputOutputContext<?, ?, ?, ?> context) throws IOException {
  return new HCatRecordDataIterable(bundle, context.getConfiguration());
}
}
/**
 * Loads this node context's runtime node list from the distributed cache and
 * initializes each node against this task context.
 *
 * @return the initialized nodes, or {@code null} if none were cached
 * @throws IOException if the cached node list cannot be read
 */
public List<RTNode> getNodes() throws IOException {
  Configuration conf = taskContext.getConfiguration();
  Path workingDir = new Path(conf.get(PlanningParameters.CRUNCH_WORKING_DIRECTORY));
  Path nodesPath = new Path(workingDir, nodeContext.toString());
  @SuppressWarnings("unchecked")
  List<RTNode> loaded = (List<RTNode>) DistCache.read(conf, nodesPath);
  if (loaded != null) {
    for (RTNode node : loaded) {
      node.initialize(this);
    }
  }
  return loaded;
}
/**
 * Loads this node context's runtime node list from the distributed cache and
 * initializes each node against this task context.
 *
 * @return the initialized nodes, or {@code null} if none were cached
 * @throws IOException if the cached node list cannot be read
 */
public List<RTNode> getNodes() throws IOException {
  Configuration conf = taskContext.getConfiguration();
  Path workingDir = new Path(conf.get(PlanningParameters.CRUNCH_WORKING_DIRECTORY));
  Path nodesPath = new Path(workingDir, nodeContext.toString());
  @SuppressWarnings("unchecked")
  List<RTNode> loaded = (List<RTNode>) DistCache.read(conf, nodesPath);
  if (loaded != null) {
    for (RTNode node : loaded) {
      node.initialize(this);
    }
  }
  return loaded;
}
public AggregatingMetricsStore(ContextWriter<OK,OV> contextWriter, TaskInputOutputContext<?,?,OK,OV> context) { this.contextWriter = contextWriter; this.context = context; this.maxSize = MetricsConfiguration.getAggBufferSize(context.getConfiguration()); this.table = new Text(MetricsConfiguration.getTable(context.getConfiguration())); this.counts = new Counts<>(maxSize * 2); this.flusher = new FlushMetrics(); }
/**
 * Attaches the task context and, when the wrapped mapper is configurable,
 * hands it the Hadoop configuration.
 */
@Override
public void setContext(
    TaskInputOutputContext<Object, Object, Object, Object> context) {
  super.setContext(context);
  if (!(_mapper instanceof Configurable)) {
    return;
  }
  ((Configurable) _mapper).setConf(context.getConfiguration());
}
/**
 * Attaches the task context and, when the wrapped mapper is configurable,
 * hands it the Hadoop configuration.
 */
@Override
public void setContext(TaskInputOutputContext<Object,Object,Object,Object> context) {
  super.setContext(context);
  if (!(_mapper instanceof Configurable)) {
    return;
  }
  ((Configurable) _mapper).setConf(context.getConfiguration());
}
/**
 * Resolves the service URL stored under {@code SERVICE_URL} in the task's
 * configuration.
 *
 * @param context the task context supplying the configuration
 * @return the configured service URL
 * @throws MalformedURLException if the configured value is not a valid URL
 */
private static URL getServiceUrl(TaskInputOutputContext context) throws MalformedURLException {
  return new URL(context.getConfiguration().get(SERVICE_URL));
}
/**
 * Creates a task context for the given node context, eagerly loading the
 * runtime node list from the distributed cache.
 *
 * @param taskContext the live Hadoop task context
 * @param nodeContext identifies which node list to load
 * @throws CrunchRuntimeException if the cached node list cannot be read
 */
public CrunchTaskContext(TaskInputOutputContext<Object, Object, Object, Object> taskContext,
    NodeContext nodeContext) {
  this.taskContext = taskContext;
  this.nodeContext = nodeContext;
  Configuration conf = taskContext.getConfiguration();
  Path nodesPath = new Path(nodeContext.toString());
  try {
    this.nodes = (List<RTNode>) DistCache.read(conf, nodesPath);
  } catch (IOException e) {
    // Wrap as unchecked: callers cannot recover from missing plan data.
    throw new CrunchRuntimeException("Could not read runtime node information", e);
  }
}