Mapper$Context.write

How to use the write method in org.apache.hadoop.mapreduce.Mapper$Context

Best Java code snippets using org.apache.hadoop.mapreduce.Mapper$Context.write (Showing top 20 results out of 2,061)

Refine search:

  • Text.toString
  • Mapper.Context.getCounter
  • Counter.increment
  • Text.<init>
  • Text.set
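
Before the project-indexed snippets, here is a minimal self-contained sketch of the pattern they all share (the class and field names are illustrative, not taken from any project below): context.write(key, value) emits one intermediate key/value pair to the shuffle phase, and the argument types must match the Mapper's KEYOUT/VALUEOUT type parameters.

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

// Minimal word-count mapper: emits one (word, 1) pair per token.
public class TokenCounterMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

  private static final IntWritable ONE = new IntWritable(1);
  private final Text word = new Text();

  @Override
  protected void map(LongWritable key, Text value, Context context)
      throws IOException, InterruptedException {
    StringTokenizer itr = new StringTokenizer(value.toString());
    while (itr.hasMoreTokens()) {
      word.set(itr.nextToken());
      context.write(word, ONE); // types must match the job's map output types
    }
  }
}

Reusing the same Text and IntWritable objects across calls is the standard idiom: write serializes its arguments immediately, so mutating them afterwards is safe.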
origin: apache/kylin

@Override
public void doMap(NullWritable key, Text value, Context context) throws IOException, InterruptedException {
  tmpBuf.clear();
  int size = value.getLength() + 1;
  if (size >= tmpBuf.capacity()) {
    tmpBuf = ByteBuffer.allocate(countNewSize(tmpBuf.capacity(), size));
  }
  tmpBuf.put(Bytes.toBytes(index)[3]);
  tmpBuf.put(value.getBytes(), 0, value.getLength());
  outputKey.set(tmpBuf.array(), 0, tmpBuf.position());
  sortableKey.init(outputKey, type);
  context.write(sortableKey, NullWritable.get());
}
origin: apache/incubator-druid

 @Override
 protected void innerMap(
   InputRow inputRow,
   Context context
 ) throws IOException, InterruptedException
 {
  final List<Object> groupKey = Rows.toGroupKey(
    rollupGranularity.bucketStart(inputRow.getTimestamp()).getMillis(),
    inputRow
  );
  context.write(
    new BytesWritable(HadoopDruidIndexerConfig.JSON_MAPPER.writeValueAsBytes(groupKey)),
    NullWritable.get()
  );
  context.getCounter(HadoopDruidIndexerConfig.IndexJobCounters.ROWS_PROCESSED_COUNTER).increment(1);
 }
}
origin: Alluxio/alluxio

 /**
  * Records the Status and IP address of each mapper task node.
  */
 @Override
 protected void map(Object ignoredKey, Object ignoredValue, Context context)
   throws IOException, InterruptedException {
  context.write(new Text(CheckerUtils.performIntegrationChecks().toString()),
    new Text(CheckerUtils.getLocalAddress()));
 }
}
origin: apache/hive

 @Override
 protected void map(LongWritable key, Text value, Context context)
  throws IOException, InterruptedException {
  StringTokenizer itr = new StringTokenizer(value.toString());
  while (itr.hasMoreTokens()) {
   word.set(itr.nextToken());
   context.write(word, one);
  }
 }
}
origin: apache/incubator-pinot

@Override
protected void map(LongWritable key, Text value, Context context)
  throws IOException, InterruptedException {
 String line = value.toString();
 String[] lineSplits = line.split(" ");
 context.write(new LongWritable(Long.parseLong(lineSplits[2])), new Text(
   FileSystem.get(_properties).listStatus(new Path(_localHdfsSegmentTarPath + "/"))[0].getPath().getName()));
 LOGGER.info("Finished the job successfully");
}
origin: apache/hbase

// Excerpt from HBase's CellCounter mapper; "// ..." marks lines elided by
// the index view. It emits one count record per row, column family,
// qualifier, and cell version.
currentFamily = null;
currentQualifier = null;
context.getCounter(Counters.ROWS).increment(1);
context.write(new Text("Total ROWS"), new IntWritable(1));
// ... for each cell, when a new column family starts ...
currentFamilyName = Bytes.toStringBinary(currentFamily);
currentQualifier = null;
context.getCounter("CF", currentFamilyName).increment(1);
if (1 == context.getCounter("CF", currentFamilyName).getValue()) {
  context.write(new Text("Total Families Across all Rows"), new IntWritable(1));
  context.write(new Text(currentFamily), new IntWritable(1));
}
// ... when a new qualifier starts within the family ...
currentRowQualifierName = currentRowKey + separator + currentQualifierName;
context.write(new Text("Total Qualifiers across all Rows"), new IntWritable(1));
context.write(new Text(currentQualifierName), new IntWritable(1));
// ... and once per cell version ...
context.write(new Text(currentRowQualifierName + "_Versions"), new IntWritable(1));
context.getCounter(Counters.CELLS).increment(cellCount);
origin: intel-hadoop/HiBench

  public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
    StringTokenizer tokens = new StringTokenizer(value.toString(), " \t\n\r\f%");
    String attr = tokens.nextToken(); 
    if (attr.endsWith(":tput_samples")){
      String[] tags=attr.split(":");
      String[] samples = tokens.nextToken().split(";");
      for(int j=0; !samples[j].startsWith("EoR"); j++){
        t.set(samples[j]);
        context.write(new Text(tags[1]), t);
      }
    }
  }
}
origin: tdunning/MiA

@Override
protected void map(LongWritable key, Text value, Context context) throws IOException,
                                 InterruptedException {
 String[] fields = splitter.split(value.toString());
 if (fields.length - 1 < selectedField || fields.length - 1 < groupByField) {
  context.getCounter("Map", "LinesWithErrors").increment(1);
  return;
 }
 String oKey = fields[groupByField];
 String oValue = fields[selectedField];
 context.write(new Text(oKey), new Text(oValue));
}
origin: apache/hbase

// Excerpt from an HBase bulk-upload mapper; "// ..." marks elided lines.
String[] values = line.toString().split(",");
if (values.length != 4) {
  return;
}
// ... build the byte[] row key and a Put from the four CSV fields ...
try {
  context.write(new ImmutableBytesWritable(row), put);
} catch (InterruptedException e) {
  LOG.error("Interrupted emitting put", e);
}
origin: apache/hbase

 @Override
 protected void map(LongWritable key, Text value, final Context context)
     throws IOException, InterruptedException {
  Status status = new Status() {
   @Override
   public void setStatus(String msg) {
     context.setStatus(msg);
   }
  };
  ObjectMapper mapper = new ObjectMapper();
  TestOptions opts = mapper.readValue(value.toString(), TestOptions.class);
  Configuration conf = HBaseConfiguration.create(context.getConfiguration());
  final Connection con = ConnectionFactory.createConnection(conf);
  AsyncConnection asyncCon = null;
  try {
   asyncCon = ConnectionFactory.createAsyncConnection(conf).get();
  } catch (ExecutionException e) {
   throw new IOException(e);
  }
  // Evaluation task
  RunResult result = PerformanceEvaluation.runOneClient(this.cmd, conf, con, asyncCon, opts, status);
  // Collect how much time the thing took. Report as map output and
  // to the ELAPSED_TIME counter.
  context.getCounter(Counter.ELAPSED_TIME).increment(result.duration);
  context.getCounter(Counter.ROWS).increment(opts.perClientRunRows);
  context.write(new LongWritable(opts.startRow), new LongWritable(result.duration));
  context.progress();
 }
}
origin: ch.cern.hadoop/hadoop-mapreduce-client-jobclient

 public void map(Object key, Text value, Context context)
   throws IOException, InterruptedException {
  context.getCounter("MyCounterGroup", "MAP_INPUT_RECORDS").increment(1);
  StringTokenizer iter = new StringTokenizer(value.toString());
  while (iter.hasMoreTokens()) {
   word.set(iter.nextToken());
   context.write(word, one);
   context.getCounter("MyCounterGroup", "MAP_OUTPUT_RECORDS").increment(1);
  }
 }
}
origin: apache/kylin

// Excerpt from a Kylin cuboid-building mapper; "// ..." marks elided lines.
context.getCounter(BatchConstants.MAPREDUCE_COUNTER_GROUP_NAME, "Skipped records").increment(1L);
if (skipCounter++ % BatchConstants.NORMAL_RECORD_LOG_THRESHOLD == 0) {
  logger.info("Skipping record with ordinal: " + skipCounter);
}
// ...
context.getCounter(BatchConstants.MAPREDUCE_COUNTER_GROUP_NAME, "Processed records").increment(1L);
// ... for each child cuboid derived from the parent row key ...
Cuboid childCuboid = Cuboid.findForMandatory(cubeDesc, child);
result = ndCuboidBuilder.buildKey(parentCuboid, childCuboid, rowKeySplitter.getSplitBuffers());
outputKey.set(result.getSecond().array(), 0, result.getFirst());
context.write(outputKey, value);
origin: apache/ignite

  /** {@inheritDoc} */
  @Override public void map(Object key, Text val, Context ctx) throws IOException, InterruptedException {
    while (mapLockFile.exists())
      Thread.sleep(50);
    StringTokenizer wordList = new StringTokenizer(val.toString());
    while (wordList.hasMoreTokens()) {
      word.set(wordList.nextToken());
      ctx.write(word, one);
    }
  }
}
origin: usc-isi-i2/Web-Karma

@Override
public void map(Writable key, Text value, Context context)
    throws IOException, InterruptedException {
  Text jsonDoc = new Text(value.toString());
  context.write(NullWritable.get(), jsonDoc);
}
origin: apache/incubator-druid

context.getCounter(COUNTER_GROUP, COUNTER_LOADED).increment(inSize);
final String finalSegmentString = HadoopDruidConverterConfig.jsonMapper.writeValueAsString(finalSegment);
context.getConfiguration().set(ConvertingOutputFormat.PUBLISHED_SEGMENT_KEY, finalSegmentString);
context.write(new Text("dataSegment"), new Text(finalSegmentString));
context.getCounter(COUNTER_GROUP, COUNTER_WRITTEN).increment(finalSegment.getSize());
context.progress();
context.setStatus("Ready To Commit");
origin: cdapio/cdap

@Override
protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
 Text output = new Text();
 LongWritable one = new LongWritable(1L);
 for (String word : value.toString().split("\\W+")) { // split on non-word characters
  output.set(word);
  context.write(output, one);
 }
 clusterNameTable.write(prefix + "mapper.cluster.name", clusterName);
}
origin: apache/hive

 @Override
 public void map(NullWritable key, HCatRecord value, Context context)
  throws IOException, InterruptedException {
  LOG.info("HCatRecord: " + value);
  context.write(NullWritable.get(), new Text(value.toString()));
 }
}
origin: tdunning/MiA

@Override
protected void map(LongWritable key, Text line, Context context) throws IOException,
                                InterruptedException {
 String[] fields = splitter.split(line.toString());
 if (fields.length < 4) {
  context.getCounter("Map", "LinesWithErrors").increment(1);
  return;
 }
 String artist = fields[1];
 context.write(new Text(artist), new IntWritable(0));
}
origin: apache/hive

 @Override
 public void map(Object key, Text value, Context context)
   throws IOException, InterruptedException {
  String[] items = value.toString().split("\\s+");
  context.write(new IntWritable(items.length), value);
 }
}
origin: larsgeorge/hbase-book

 /**
  * Maps the input.
  *
  * @param offset The current offset into the input file.
  * @param line The current line of the file.
  * @param context The task context.
  * @throws IOException When mapping the input fails.
  */
 @Override
 public void map(LongWritable offset, Text line, Context context)
 throws IOException {
  try {
   String lineString = line.toString();
   byte[] rowkey = DigestUtils.md5(lineString);
   Put put = new Put(rowkey);
   put.addColumn(family, qualifier, Bytes.toBytes(lineString));
   context.write(new ImmutableBytesWritable(rowkey), put);
   context.getCounter(Counters.LINES).increment(1);
  } catch (Exception e) {
   e.printStackTrace();
  }
 }
}
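
All of the mappers above run inside a job that wires them in. For completeness, a minimal driver sketch (MyMapper and the argument paths are hypothetical) showing the setup that must agree with the types a mapper passes to context.write:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class Driver {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "context-write-demo");
    job.setJarByClass(Driver.class);
    job.setMapperClass(MyMapper.class); // hypothetical Mapper subclass
    // These must match the KEYOUT/VALUEOUT types passed to context.write(...)
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(IntWritable.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}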
org.apache.hadoop.mapreduce.Mapper$Context.write

Popular methods of Mapper$Context

  • getConfiguration
  • getCounter
  • getInputSplit
  • progress
  • setStatus
  • getTaskAttemptID
  • nextKeyValue
  • getCurrentValue
  • getCurrentKey
  • getNumReduceTasks
  • getJobID
  • getInputFormatClass
  • getLocalCacheFiles
  • getOutputCommitter
  • getCredentials
  • getLocalCacheArchives
  • getStatus
  • getCacheArchives
  • getCacheFiles
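
Several of these methods commonly appear alongside write, as the snippets above show. A combined sketch (the counter group, status text, and configuration key are illustrative, not from any indexed project):

import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

// Illustrative mapper combining the popular Context calls above:
// getConfiguration() for job settings, getCounter()/increment() for metrics,
// setStatus()/progress() for liveness, and write() for output.
public class AuditMapper extends Mapper<LongWritable, Text, Text, LongWritable> {
  private long seen;
  private int maxLength; // hypothetical job parameter

  @Override
  protected void setup(Context context) {
    maxLength = context.getConfiguration().getInt("audit.max.line.length", 1024);
  }

  @Override
  protected void map(LongWritable key, Text value, Context context)
      throws IOException, InterruptedException {
    if (value.getLength() > maxLength) {
      context.getCounter("Audit", "OVERLONG_LINES").increment(1);
      return;
    }
    context.write(value, new LongWritable(1));
    if (++seen % 10_000 == 0) {
      context.setStatus("processed " + seen + " lines");
      context.progress(); // reassure the framework during long-running maps
    }
  }
}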
