Mapper$Context.getJobID

How to use getJobID method in org.apache.hadoop.mapreduce.Mapper$Context

Best Java code snippets using org.apache.hadoop.mapreduce.Mapper$Context.getJobID
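Before the collected snippets, here is a minimal, self-contained sketch of the common pattern (the mapper class and its key/value types are hypothetical, for illustration only): getJobID() is typically called from setup(), often together with getTaskAttemptID(), to build an identifier that tags the work of one task.

import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class JobIdTaggingMapper extends Mapper<LongWritable, Text, Text, Text> {
 private String id;

 @Override
 protected void setup(Context context) {
  // Combine the job id and task attempt id into a unique tag for this task.
  id = "Job: " + context.getJobID() + " Task: " + context.getTaskAttemptID();
 }

 @Override
 protected void map(LongWritable key, Text value, Context context)
   throws IOException, InterruptedException {
  // Emit each record keyed by the task's identifier.
  context.write(new Text(id), value);
 }
}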

origin: apache/hive

killLauncherChildJobs(conf, context.getJobID().toString());
    context.getJobID().toString(),
    conf.get("user.name"),
    conf.get(OVERRIDE_CLASSPATH));
executeWatcher(pool, conf, context.getJobID(),
    proc.getInputStream(), statusdir, STDOUT_FNAME);
executeWatcher(pool, conf, context.getJobID(),
    proc.getErrorStream(), statusdir, STDERR_FNAME);
KeepAlive keepAlive = startCounterKeepAlive(pool, context);
updateJobStateToDoneAndWriteExitValue(conf, statusdir, context.getJobID().toString(),
  proc.exitValue());
 LOG.info("templeton: collecting logs for " + context.getJobID().toString()
      + " to " + statusdir + "/logs");
 LogRetriever logRetriever = new LogRetriever(statusdir, jobType, conf);
origin: apache/hive

WebHCatJTShim tracker = ShimLoader.getHadoopShims().getWebHCatShim(conf, ugi);
try {
 Set<String> childJobs = tracker.getJobs(context.getJobID().toString(), startTime);
 if (childJobs.size() == 0) {
  LOG.info("No child jobs found to reconnect with");
 updateJobStatePercentAndChildId(conf, context.getJobID().toString(), null, childJobIdString);
    exitCode = 1;
   updateJobStateToDoneAndWriteExitValue(conf, statusdir, context.getJobID().toString(),
     exitCode);
   break;
  updateJobStatePercentAndChildId(conf, context.getJobID().toString(), percent, null);
origin: org.apache.pig/pig

/**
 * The task id is set up here so that it can be attached to each tuple.
 **/
@Override
public void setup(Context context) throws IOException, InterruptedException {
  super.setup(context);
  int taskIDInt = context.getTaskAttemptID().getTaskID().getId();
  taskID = String.valueOf(taskIDInt);
  pOperator = mp.getLeaves().get(0);
  while(true) {
    if(pOperator instanceof POCounter){
      ((POCounter) pOperator).setTaskId(taskIDInt);
      ((POCounter) pOperator).resetLocalCounter();
      break;
    } else {
      pOperator = mp.getPredecessors(pOperator).get(0);
    }
  }
  PigStatusReporter reporter = PigStatusReporter.getInstance();
  if (reporter != null) {
    reporter.incrCounter(
        JobControlCompiler.PIG_MAP_RANK_NAME
        + context.getJobID().toString(), taskID, 0);
  }
}
origin: org.apache.giraph/giraph-core

/**
 * Get string, replacing variables in the output.
 *
 * %JOB_ID% => job id
 * %TASK_ID% => task id
 * %USER% => owning user name
 *
 * @param key name of key to lookup
 * @param defaultValue value to return if no mapping exists. This can also
 *                     have variables, which will be substituted.
 * @param context mapper context
 * @return value for key, with variables expanded
 */
public String getStringVars(String key, String defaultValue,
              Mapper.Context context) {
 String value = get(key);
 if (value == null) {
  if (defaultValue == null) {
   return null;
  }
  value = defaultValue;
 }
 value = value.replace("%JOB_ID%", context.getJobID().toString());
 value = value.replace("%TASK_ID%", context.getTaskAttemptID().toString());
 value = value.replace("%USER%", get("user.name", "unknown_user"));
 return value;
}
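A hedged usage sketch for the method above (the key and default path are made up for illustration; conf is assumed to be the configuration object that defines getStringVars):

// Hypothetical key and default value; the %...% variables are expanded
// from the running mapper context and the configuration.
String dir = conf.getStringVars("giraph.checkpoint.dir",
    "/tmp/%USER%/%JOB_ID%/%TASK_ID%", context);
// e.g. "/tmp/alice/job_1700000000000_0001/attempt_1700000000000_0001_m_000000_0"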
origin: org.apache.kudu/kudu-client-tools

@Override
protected void setup(Context context) throws KuduException {
 id = "Job: " + context.getJobID() + " Task: " + context.getTaskAttemptID();
 Configuration conf = context.getConfiguration();
 CommandLineParser parser = new CommandLineParser(conf);
 client = parser.getClient();
 table = client.openTable(getTableName(conf));
 headsTable = client.openTable(getHeadsTable(conf));
 session = client.newSession();
 session.setFlushMode(SessionConfiguration.FlushMode.MANUAL_FLUSH);
 session.setMutationBufferSpace(WIDTH_DEFAULT);
 session.setIgnoreAllDuplicateRows(true);
 this.width = context.getConfiguration().getInt(GENERATOR_WIDTH_KEY, WIDTH_DEFAULT);
 current = new byte[this.width][];
 int wrapMultiplier = context.getConfiguration().getInt(GENERATOR_WRAP_KEY, WRAP_DEFAULT);
 this.wrap = (long)wrapMultiplier * width;
 this.numNodes = context.getConfiguration().getLong(
   GENERATOR_NUM_ROWS_PER_MAP_KEY, (long)WIDTH_DEFAULT * WRAP_DEFAULT);
 if (this.numNodes < this.wrap) {
  this.wrap = this.numNodes;
 }
}
origin: apache/sqoop

/**
 * Creates a simple MapReduce context that claims to hold a single record,
 * but won't actually return any records, as tests are not expected to read them.
 * @return a mocked Mapper.Context
 * @throws java.io.IOException
 * @throws InterruptedException
 */
private Mapper.Context getContext() throws java.io.IOException, InterruptedException {
 Mapper.Context context = mock(Mapper.Context.class);
 Configuration conf = new Configuration();
 conf.set("mapreduce.task.id", UUID.randomUUID().toString());
 when(context.getConfiguration()).thenReturn(conf);
 TaskAttemptID taskAttemptID = new TaskAttemptID();
 when(context.getTaskAttemptID()).thenReturn(taskAttemptID);
 JobID jobID = new JobID("job001", 1);
 when(context.getJobID()).thenReturn(jobID);
 // Simulate a single record by answering 'true' once
 when(context.nextKeyValue()).thenAnswer(new Answer<Object>() {
  boolean answer = true;
  @Override
  public Object answer(InvocationOnMock invocation) {
   if (answer) {
    answer = false;
    return true;
   }
   return false;
  }
 });
 return context;
}
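A sketch of how a test might drive this mocked context (JUnit-style assertions assumed; the test body is hypothetical):

// Hypothetical test body: the mock reports exactly one record, then stops.
Mapper.Context context = getContext();
assertTrue(context.nextKeyValue());    // first call: one record available
assertFalse(context.nextKeyValue());   // later calls: no more records
assertEquals("job001", context.getJobID().getJtIdentifier());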
origin: org.apache.hbase/hbase-it

@Override
protected void setup(Context context) throws IOException, InterruptedException {
 id = Bytes.toBytes("Job: "+context.getJobID() + " Task: " + context.getTaskAttemptID());
 this.connection = ConnectionFactory.createConnection(context.getConfiguration());
 instantiateHTable();
 this.width = context.getConfiguration().getInt(GENERATOR_WIDTH_KEY, WIDTH_DEFAULT);
 current = new byte[this.width][];
 int wrapMultiplier = context.getConfiguration().getInt(GENERATOR_WRAP_KEY, WRAP_DEFAULT);
 this.wrap = (long)wrapMultiplier * width;
 this.numNodes = context.getConfiguration().getLong(
   GENERATOR_NUM_ROWS_PER_MAP_KEY, (long)WIDTH_DEFAULT * WRAP_DEFAULT);
 if (this.numNodes < this.wrap) {
  this.wrap = this.numNodes;
 }
 this.multipleUnevenColumnFamilies = isMultiUnevenColumnFamilies(context.getConfiguration());
 this.numWalkers = context.getConfiguration().getInt(CONCURRENT_WALKER_KEY, CONCURRENT_WALKER_DEFAULT);
 this.walkersStop = false;
 this.conf = context.getConfiguration();
}
origin: org.apache.pig/pig

  /**
   * As tuples are collected, they are counted one by one by a per-task global counter.
   **/
  @Override
  public void collect(Context context, Tuple tuple)
  throws InterruptedException, IOException {
    context.write(null, tuple);
    PigStatusReporter reporter = PigStatusReporter.getInstance();
    if (reporter != null) {
      reporter.incrCounter(
          JobControlCompiler.PIG_MAP_RANK_NAME
          + context.getJobID().toString(), taskID, 1);
    }
  }
}
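Because the counter group name embeds the job id, rank counters from concurrent jobs cannot collide. A hedged sketch of reading the accumulated per-task counts back after completion (job is a hypothetical org.apache.hadoop.mapreduce.Job handle for the finished job):

// Hypothetical: read the per-task tuple counts accumulated above.
Counters counters = job.getCounters();
CounterGroup group = counters.getGroup(
    JobControlCompiler.PIG_MAP_RANK_NAME + job.getJobID().toString());
for (Counter counter : group) {
  System.out.println("task " + counter.getName()
      + " emitted " + counter.getValue() + " tuples");
}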
origin: apache/giraph

@Override
public void preApplication()
  throws InstantiationException, IllegalAccessException {
 workerLogic = new BlockWorkerContextLogic();
 workerLogic.preApplication(new BlockWorkerContextApiWrapper<>(this),
   new BlockOutputHandle(getContext().getJobID().toString(),
     getConf(), getContext()));
}
origin: apache/giraph

@Override
public void initialize() throws InstantiationException,
  IllegalAccessException {
 blockMasterLogic.initialize(getConf(), new BlockMasterApiWrapper(this,
   new BlockOutputHandle(getContext().getJobID().toString(),
   getConf(), getContext())));
}

Popular methods of Mapper$Context

  • write
  • getConfiguration
  • getCounter
  • getInputSplit
  • progress
  • setStatus
  • getTaskAttemptID
  • nextKeyValue
  • getCurrentValue
  • getCurrentKey
  • getNumReduceTasks
  • getInputFormatClass
  • getLocalCacheFiles
  • getOutputCommitter
  • getCredentials
  • getLocalCacheArchives
  • getStatus
  • getCacheArchives
  • getCacheFiles
