Tabnine Logo
Mapper$Context.getCurrentKey
Code IndexAdd Tabnine to your IDE (free)

How to use
getCurrentKey
method
in
org.apache.hadoop.mapreduce.Mapper$Context

Best Java code snippets using org.apache.hadoop.mapreduce.Mapper$Context.getCurrentKey (Showing top 20 results out of 315)

origin: apache/incubator-druid

// Mapper run loop that, after consuming all input, emits one HyperLogLog
// sketch per interval accumulated in hyperLogLogs during map().
// NOTE(review): cleanup(context) is not in a finally block, so it is skipped
// if map() or context.write() throws — confirm this is deliberate.
@Override
public void run(Context context) throws IOException, InterruptedException
{
 setup(context);
 // Standard Mapper.run loop: feed every key/value pair to map().
 while (context.nextKeyValue()) {
  map(context.getCurrentKey(), context.getCurrentValue(), context);
 }
 // Flush the per-interval HLL collectors: key = interval start in millis,
 // value = the serialized sketch bytes.
 for (Map.Entry<Interval, HyperLogLogCollector> entry : hyperLogLogs.entrySet()) {
  context.write(
    new LongWritable(entry.getKey().getStartMillis()),
    new BytesWritable(entry.getValue().toByteArray())
  );
 }
 cleanup(context);
}
origin: apache/hbase

 /**
  * Returns true when the supplied cell's row is one of the rows being
  * searched for; on a match, logs the WAL key the row was found under and
  * bumps the per-row and aggregate counters.
  * @param context the map task context (counters, current WAL key)
  * @param cell the cell whose row is tested against {@code keysToFind}
  * @return true when the cell's row is contained in {@code keysToFind}
  */
 @Override
 protected boolean filter(Context context, Cell cell) {
  // TODO: Can I do a better compare than this copying out key?
  byte [] row = new byte [cell.getRowLength()];
  System.arraycopy(cell.getRowArray(), cell.getRowOffset(), row, 0, cell.getRowLength());
  boolean b = this.keysToFind.contains(row);
  if (b) {
   String keyStr = Bytes.toStringBinary(row);
   try {
    LOG.info("Found cell=" + cell + " , walKey=" + context.getCurrentKey());
   } catch (IOException e) {
    // Best-effort logging only; the filter result is unaffected.
    LOG.warn(e.toString(), e);
   } catch (InterruptedException e) {
    // Restore the interrupt status instead of swallowing it, so the task
    // can still observe the interruption after this best-effort log call.
    Thread.currentThread().interrupt();
    LOG.warn(e.toString(), e);
   }
   // Count the first few found rows individually; always count the total.
   if (rows.addAndGet(1) < MISSING_ROWS_TO_LOG) {
    context.getCounter(FOUND_GROUP_KEY, keyStr).increment(1);
   }
   context.getCounter(FOUND_GROUP_KEY, "CELL_WITH_MISSING_ROW").increment(1);
  }
  return b;
 }
}
origin: apache/hbase

 /**
  * Returns true when the supplied cell's row is one of the rows being
  * searched for; on a match, logs the WAL key the row was found under and
  * bumps the per-row and aggregate counters.
  * @param context the map task context (counters, current WAL key)
  * @param cell the cell whose row is tested against {@code keysToFind}
  * @return true when the cell's row is contained in {@code keysToFind}
  */
 @Override
 protected boolean filter(Context context, Cell cell) {
  // TODO: Can I do a better compare than this copying out key?
  byte [] row = new byte [cell.getRowLength()];
  System.arraycopy(cell.getRowArray(), cell.getRowOffset(), row, 0, cell.getRowLength());
  boolean b = this.keysToFind.contains(row);
  if (b) {
   String keyStr = Bytes.toStringBinary(row);
   try {
    LOG.info("Found cell=" + cell + " , walKey=" + context.getCurrentKey());
   } catch (IOException e) {
    // Best-effort logging only; the filter result is unaffected.
    LOG.warn(e.toString(), e);
   } catch (InterruptedException e) {
    // Restore the interrupt status instead of swallowing it, so the task
    // can still observe the interruption after this best-effort log call.
    Thread.currentThread().interrupt();
    LOG.warn(e.toString(), e);
   }
   // Count the first few found rows individually; always count the total.
   if (rows.addAndGet(1) < MISSING_ROWS_TO_LOG) {
    context.getCounter(FOUND_GROUP_KEY, keyStr).increment(1);
   }
   context.getCounter(FOUND_GROUP_KEY, "CELL_WITH_MISSING_ROW").increment(1);
  }
  return b;
 }
}
origin: ch.cern.hadoop/hadoop-mapreduce-client-core

 /**
  * Expert users can override this method for more complete control over the
  * execution of the Mapper: calls setup once, then map() for every
  * key/value pair in the input, with cleanup in a finally block so it runs
  * even when setup() or map() throws.
  * @param context the task context supplying input pairs and the output sink
  * @throws IOException if reading input or writing output fails
  * @throws InterruptedException if the task is interrupted
  */
  public void run(Context context) throws IOException, InterruptedException {
   setup(context);
   try {
    while (context.nextKeyValue()) {
     map(context.getCurrentKey(), context.getCurrentValue(), context);
    }
   } finally {
    cleanup(context);
   }
  }
}
origin: io.prestosql.hadoop/hadoop-apache

 /**
  * Expert users can override this method for more complete control over the
  * execution of the Mapper: calls setup once, then map() for every
  * key/value pair in the input, with cleanup in a finally block so it runs
  * even when setup() or map() throws.
  * @param context the task context supplying input pairs and the output sink
  * @throws IOException if reading input or writing output fails
  * @throws InterruptedException if the task is interrupted
  */
  public void run(Context context) throws IOException, InterruptedException {
   setup(context);
   try {
    while (context.nextKeyValue()) {
     map(context.getCurrentKey(), context.getCurrentValue(), context);
    }
   } finally {
    cleanup(context);
   }
  }
}
origin: com.github.jiayuhan-it/hadoop-mapreduce-client-core

 /**
  * Expert users can override this method for more complete control over the
  * execution of the Mapper: calls setup once, then map() for every
  * key/value pair in the input, with cleanup in a finally block so it runs
  * even when setup() or map() throws.
  * @param context the task context supplying input pairs and the output sink
  * @throws IOException if reading input or writing output fails
  * @throws InterruptedException if the task is interrupted
  */
  public void run(Context context) throws IOException, InterruptedException {
   setup(context);
   try {
    while (context.nextKeyValue()) {
     map(context.getCurrentKey(), context.getCurrentValue(), context);
    }
   } finally {
    cleanup(context);
   }
  }
}
origin: io.hops/hadoop-mapreduce-client-core

 /**
  * Expert users can override this method for more complete control over the
  * execution of the Mapper: calls setup once, then map() for every
  * key/value pair in the input, with cleanup in a finally block so it runs
  * even when setup() or map() throws.
  * @param context the task context supplying input pairs and the output sink
  * @throws IOException if reading input or writing output fails
  * @throws InterruptedException if the task is interrupted
  */
  public void run(Context context) throws IOException, InterruptedException {
   setup(context);
   try {
    while (context.nextKeyValue()) {
     map(context.getCurrentKey(), context.getCurrentValue(), context);
    }
   } finally {
    cleanup(context);
   }
  }
}
origin: opencb/opencga

// Map loop that interleaves annotation flushing: annotateVariants(context,
// false) after each record, and a final annotateVariants(context, true) once
// input is exhausted — presumably false = "flush when batch is full" and
// true = "force-flush the remainder"; TODO confirm against annotateVariants.
@Override
public void run(Context context) throws IOException, InterruptedException {
  this.setup(context);
  try {
    while (context.nextKeyValue()) {
      this.map(context.getCurrentKey(), context.getCurrentValue(), context);
      annotateVariants(context, false);
    }
    annotateVariants(context, true);
  } catch (VariantAnnotatorException e) {
    // Surface the checked annotator failure as an unchecked task failure.
    throw new RuntimeException(e);
  } finally {
    this.cleanup(context);
  }
}
origin: org.apache.druid/druid-indexing-hadoop

// Mapper run loop that, after consuming all input, emits one HyperLogLog
// sketch per interval accumulated in hyperLogLogs during map().
// NOTE(review): cleanup(context) is not in a finally block, so it is skipped
// if map() or context.write() throws — confirm this is deliberate.
@Override
public void run(Context context) throws IOException, InterruptedException
{
 setup(context);
 // Standard Mapper.run loop: feed every key/value pair to map().
 while (context.nextKeyValue()) {
  map(context.getCurrentKey(), context.getCurrentValue(), context);
 }
 // Flush the per-interval HLL collectors: key = interval start in millis,
 // value = the serialized sketch bytes.
 for (Map.Entry<Interval, HyperLogLogCollector> entry : hyperLogLogs.entrySet()) {
  context.write(
    new LongWritable(entry.getKey().getStartMillis()),
    new BytesWritable(entry.getValue().toByteArray())
  );
 }
 cleanup(context);
}
origin: io.druid/druid-indexing-hadoop

// Mapper run loop that, after consuming all input, emits one HyperLogLog
// sketch per interval accumulated in hyperLogLogs during map().
// NOTE(review): cleanup(context) is not in a finally block, so it is skipped
// if map() or context.write() throws — confirm this is deliberate.
@Override
public void run(Context context) throws IOException, InterruptedException
{
 setup(context);
 // Standard Mapper.run loop: feed every key/value pair to map().
 while (context.nextKeyValue()) {
  map(context.getCurrentKey(), context.getCurrentValue(), context);
 }
 // Flush the per-interval HLL collectors: key = interval start in millis,
 // value = the serialized sketch bytes.
 for (Map.Entry<Interval, HyperLogLogCollector> entry : hyperLogLogs.entrySet()) {
  context.write(
    new LongWritable(entry.getKey().getStartMillis()),
    new BytesWritable(entry.getValue().toByteArray())
  );
 }
 cleanup(context);
}
origin: com.n3twork.druid/druid-indexing-hadoop

// Mapper run loop that, after consuming all input, emits one HyperLogLog
// sketch per interval accumulated in hyperLogLogs during map().
// NOTE(review): cleanup(context) is not in a finally block, so it is skipped
// if map() or context.write() throws — confirm this is deliberate.
@Override
public void run(Context context) throws IOException, InterruptedException
{
 setup(context);
 // Standard Mapper.run loop: feed every key/value pair to map().
 while (context.nextKeyValue()) {
  map(context.getCurrentKey(), context.getCurrentValue(), context);
 }
 // Flush the per-interval HLL collectors: key = interval start in millis,
 // value = the serialized sketch bytes.
 for (Map.Entry<Interval, HyperLogLogCollector> entry : hyperLogLogs.entrySet()) {
  context.write(
    new LongWritable(entry.getKey().getStartMillis()),
    new BytesWritable(entry.getValue().toByteArray())
  );
 }
 cleanup(context);
}
origin: com.marklogic/mlcp

  /**
   * Map loop that also honors the global ContentPump.shutdown flag: input
   * processing stops as soon as shutdown is requested, and cleanup always
   * runs via the finally block.
   * @param context the task context supplying input pairs
   * @throws IOException if reading input or writing output fails
   * @throws InterruptedException if the task is interrupted
   */
  @Override
  public void run(Context context) throws IOException, InterruptedException {
    setup(context);
    try {
      // Stop pulling input early when a shutdown has been requested.
      while (!ContentPump.shutdown && context.nextKeyValue()) {
        map(context.getCurrentKey(), context.getCurrentValue(), context);
      }
    } finally {
      if (ContentPump.shutdown && LOG.isDebugEnabled()) {
        LOG.debug("Aborting task...");
      }
      cleanup(context);
    }
  }
}
origin: com.facebook.hadoop/hadoop-core

 /**
  * Expert users can override this method for more complete control over the
  * execution of the Mapper: calls setup once, then map() for every
  * key/value pair in the input, then cleanup.
  * NOTE(review): unlike later Hadoop versions, cleanup(context) is NOT in a
  * finally block here, so it is skipped when setup() or map() throws.
  * @param context the task context supplying input pairs and the output sink
  * @throws IOException if reading input or writing output fails
  * @throws InterruptedException if the task is interrupted
  */
  public void run(Context context) throws IOException, InterruptedException {
   setup(context);
   while (context.nextKeyValue()) {
    map(context.getCurrentKey(), context.getCurrentValue(), context);
   }
   cleanup(context);
  }
}
origin: org.apache.hadoop/hadoop-mapred

 /**
  * Expert users can override this method for more complete control over the
  * execution of the Mapper: calls setup once, then map() for every
  * key/value pair in the input, then cleanup.
  * NOTE(review): unlike later Hadoop versions, cleanup(context) is NOT in a
  * finally block here, so it is skipped when setup() or map() throws.
  * @param context the task context supplying input pairs and the output sink
  * @throws IOException if reading input or writing output fails
  * @throws InterruptedException if the task is interrupted
  */
  public void run(Context context) throws IOException, InterruptedException {
   setup(context);
   while (context.nextKeyValue()) {
    map(context.getCurrentKey(), context.getCurrentValue(), context);
   }
   cleanup(context);
  }
}
origin: apache/incubator-rya

// Advances the context to its next key/value pair and converts that pair into
// a RyaStatement. Returns null when the input is exhausted, or when the row
// cannot be resolved into a triple (the resolver error is logged, not
// rethrown — best-effort skip of unparseable rows).
private static RyaStatement nextRyaStatement(final Context context, final RyaTripleContext ryaContext) throws IOException, InterruptedException {
  RyaStatement ryaStatement = null;
  if (context.nextKeyValue()) {
    final Key key = context.getCurrentKey();
    final Value value = context.getCurrentValue();
    try {
      ryaStatement = createRyaStatement(key, value, ryaContext);
    } catch (final TripleRowResolverException e) {
      // Skip this row rather than failing the whole task.
      log.error("TripleRowResolverException encountered while creating statement", e);
    }
  }
  return ryaStatement;
}
origin: ShifuML/shifu

// Thread-safe delegating reader: advances the shared (outer) reader under its
// lock and snapshots the current key/value into this reader's own copies, so
// the pair stays stable while other threads advance the outer reader.
// NOTE(review): the key copy uses outer.getConfiguration() while the value
// copy uses the cached conf field — presumably the same object; confirm.
@Override
public boolean nextKeyValue() throws IOException, InterruptedException {
  synchronized(outer) {
    if(!outer.nextKeyValue()) {
      return false;
    }
    // Deep-copy so the snapshot is independent of the outer reader's
    // reused key/value instances.
    key = ReflectionUtils.copy(outer.getConfiguration(), outer.getCurrentKey(), key);
    value = ReflectionUtils.copy(conf, outer.getCurrentValue(), value);
    return true;
  }
}
origin: pl.edu.icm.coansys/commons

// Sampling mapper: appears to forward a random subset of the input to map(),
// with the acceptance chance shrinking as `count` approaches `fequency`.
// NOTE(review): `fequency` is a pre-existing typo of "frequency" in a field
// declared outside this snippet, so it cannot be renamed here. Also note
// `count` advances on every record, mapped or not — confirm that is intended.
@Override
public void run(Context context) throws IOException, InterruptedException {
  setup(context);
  while ((count < fequency) && context.nextKeyValue()) {
    // diff > 1 accepts unconditionally; acceptance tightens near the cap.
    float diff = fequency - count;
    if (random.nextFloat() <= diff) {
      map(context.getCurrentKey(), context.getCurrentValue(), context);
    }
    count++;
  }
  cleanup(context);
}
origin: pl.edu.icm.coansys/commons

// Limiting mapper: forwards at most `limit` input records to map(), then
// stops reading input. cleanup() is not in a finally block, matching the
// sibling run() implementations in this package.
@Override
public void run(Context context) throws IOException, InterruptedException {
  setup(context);
  while ((count < limit) && context.nextKeyValue()) {
    map(context.getCurrentKey(), context.getCurrentValue(), context);
    count++;
  }
  cleanup(context);
}
origin: ShifuML/shifu

// Thread-safe delegating reader: advances the shared (outer) reader under its
// lock and snapshots the current key/value into this reader's own copies, so
// the pair stays stable while other threads advance the outer reader.
// NOTE(review): the key copy uses outer.getConfiguration() while the value
// copy uses the cached conf field — presumably the same object; confirm.
@Override
public boolean nextKeyValue() throws IOException, InterruptedException {
  synchronized(outer) {
    if(!outer.nextKeyValue()) {
      return false;
    }
    // Deep-copy so the snapshot is independent of the outer reader's
    // reused key/value instances.
    key = ReflectionUtils.copy(outer.getConfiguration(), outer.getCurrentKey(), key);
    value = ReflectionUtils.copy(conf, outer.getCurrentValue(), value);
    return true;
  }
}
origin: apache/incubator-gobblin

this.map(context.getCurrentKey(), context.getCurrentValue(), context);
org.apache.hadoop.mapreduce.Mapper$Context.getCurrentKey

Popular methods of Mapper$Context

  • write
  • getConfiguration
  • getCounter
  • getInputSplit
  • progress
  • setStatus
  • getTaskAttemptID
  • nextKeyValue
  • getCurrentValue
  • getNumReduceTasks
  • getJobID
  • getInputFormatClass
  • getJobID,
  • getInputFormatClass,
  • getLocalCacheFiles,
  • getOutputCommitter,
  • getCredentials,
  • getLocalCacheArchives,
  • getStatus,
  • getCacheArchives,
  • getCacheFiles

Popular in Java

  • Reactive rest calls using spring rest template
  • putExtra (Intent)
  • findViewById (Activity)
  • onCreateOptionsMenu (Activity)
  • String (java.lang)
  • URLEncoder (java.net)
    This class is used to encode a string using the format required by application/x-www-form-urlencoded.
  • Pattern (java.util.regex)
    Patterns are compiled regular expressions. In many cases, convenience methods such as String#matches are sufficient.
  • DataSource (javax.sql)
    An interface for the creation of Connection objects which represent a connection to a database. This
  • Base64 (org.apache.commons.codec.binary)
    Provides Base64 encoding and decoding as defined by RFC 2045. This class implements section 6.8 (Base64 Content-Transfer-Encoding).
  • Join (org.hibernate.mapping)
  • Best plugins for Eclipse
Tabnine Logo
  • Products

    Search for Java codeSearch for JavaScript code
  • IDE Plugins

    IntelliJ IDEAWebStormVisual StudioAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimGoLandRubyMineEmacsJupyter NotebookJupyter LabRiderDataGripAppCode
  • Company

    About UsContact UsCareers
  • Resources

    FAQBlogTabnine AcademyTerms of usePrivacy policyJava Code IndexJavascript Code Index
Get Tabnine for your IDE now