context.setStatus("Emitting Put " + count);
context.setStatus("DOWNLOADING"); context.progress(); final Path inPath = new Path(JobHelper.getURIFromSegment(segment)); context.getCounter(COUNTER_GROUP, COUNTER_LOADED).increment(inSize); context.setStatus("CONVERTING"); context.progress(); final File outDir = new File(tmpDir, "out"); context.setStatus("Validating"); HadoopDruidConverterConfig.INDEX_IO.validateTwoSegments(inDir, outDir); context.setStatus("Starting PUSH"); final Path baseOutputPath = new Path(config.getSegmentOutputPath()); final FileSystem outputFS = baseOutputPath.getFileSystem(context.getConfiguration()); ); context.progress(); context.setStatus("Finished PUSH"); final String finalSegmentString = HadoopDruidConverterConfig.jsonMapper.writeValueAsString(finalSegment); context.getConfiguration().set(ConvertingOutputFormat.PUBLISHED_SEGMENT_KEY, finalSegmentString); context.setStatus("Ready To Commit");
@Override
protected void cleanup(Mapper<Text, Text, Text, Text>.Context context) throws IOException, InterruptedException {
  super.cleanup(context);
  tcs.close();
  context.setStatus("Seen " + tcs.getTotalRecords() + " records...");
}
@Override
protected void cleanup(Context context) throws IOException, InterruptedException {
  // Delete the temporary working directory recorded under TMP_FILE_LOC_KEY.
  final String tmpDirLoc = context.getConfiguration().get(TMP_FILE_LOC_KEY);
  final File tmpDir = Paths.get(tmpDirLoc).toFile();
  FileUtils.deleteDirectory(tmpDir);
  context.progress();
  context.setStatus("Clean");
}
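A minimal sketch of the setup() side this cleanup pairs with, assuming the same TMP_FILE_LOC_KEY constant; the temp-directory prefix and the use of java.nio.file.Files here are illustrative, not from the original job.

@Override
protected void setup(Context context) throws IOException {
  // Create a scratch directory and record its location so cleanup() can delete it.
  final java.nio.file.Path tmpPath = java.nio.file.Files.createTempDirectory("converter-tmp");
  context.getConfiguration().set(TMP_FILE_LOC_KEY, tmpPath.toString());
  context.setStatus("Setup complete");
}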
@Override
public void setStatus(String msg) {
  context.setStatus(msg);
}
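One plausible shape for the anonymous class this override lives in, assuming a small callback interface of our own (ProgressCallback is hypothetical, not a Hadoop type) so long-running helpers can report status without depending on Mapper.Context directly.

interface ProgressCallback {
  void setStatus(String msg);
}

final ProgressCallback callback = new ProgressCallback() {
  @Override
  public void setStatus(String msg) {
    // Forward status text to the live task attempt so it shows up in the job UI.
    context.setStatus(msg);
  }
};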
@Override
protected void map(Text key, BytesWritable value, Context context) throws IOException, InterruptedException {
  PersonProtos.PersonWrapper dw = PersonProtos.PersonWrapper.parseFrom(
      ByteString.copyFrom(value.copyBytes()));
  context.setStatus("Processing doc with id: " + dw.getRowId());
  String rdfAsString = CoansysTransformers.CTF.getWriter(
      CoansysTransformersConstants.PERSON_PROTO_MODEL,
      CoansysTransformersConstants.RDF_N_TRIPLES).write(dw);
  context.write(getMapOutputKey(), new Text(rdfAsString));
}
@Override
protected void setup(Context context) throws IOException {
  context.setStatus(myStatus);
  assertEquals(myStatus, context.getStatus());
}
protected void processError(Context c, Throwable t, Text k, Text v) {
  log.error("Caught exception processing key[" + k + "], value[" + v + "]", t);
  c.getCounter(Counters.FAILED_RECORDS).increment(1);
  c.setStatus("Records with failures = " + (++failedRecords));
}
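A sketch of how a map() body might route failures through processError, assuming a per-record try/catch in the enclosing mapper; processRecord is a hypothetical stand-in for the real per-record work.

@Override
protected void map(Text key, Text value, Context context) throws IOException, InterruptedException {
  try {
    processRecord(key, value, context);
  } catch (Exception e) {
    // Count and surface the failure, then keep going rather than failing the task.
    processError(context, e, key, value);
  }
}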
@Override
public void map(LongWritable key, LongWritable value, Context context) throws IOException, InterruptedException {
  context.setStatus("Sleeping... " + value.get() + " ms left");
  long now = System.currentTimeMillis();
  if (now < key.get()) {
    TimeUnit.MILLISECONDS.sleep(key.get() - now);
  }
}
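If a single sleep can outlast mapreduce.task.timeout, the attempt may be killed as unresponsive; a variant that sleeps in slices and heartbeats between them might look like the sketch below (the 60-second slice is an arbitrary choice, not from the original).

@Override
public void map(LongWritable key, LongWritable value, Context context) throws IOException, InterruptedException {
  long deadline = key.get();
  long now;
  while ((now = System.currentTimeMillis()) < deadline) {
    context.setStatus("Sleeping... " + (deadline - now) + " ms left");
    // Sleep at most one slice, then report progress so the task is not marked dead.
    TimeUnit.MILLISECONDS.sleep(Math.min(deadline - now, 60_000L));
    context.progress();
  }
}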
private void updateContextStatus(long totalBytesRead, Mapper.Context context, CopyListingFileStatus source2) {
  StringBuilder message = new StringBuilder(
      DistCpUtils.getFormatter().format(totalBytesRead * 100.0f / source2.getLen()));
  message.append("% ")
      .append(description).append(" [")
      .append(DistCpUtils.getStringDescriptionFor(totalBytesRead))
      .append('/')
      .append(DistCpUtils.getStringDescriptionFor(source2.getLen()))
      .append(']');
  context.setStatus(message.toString());
}
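One plausible way to drive the helper above, assuming a byte-copy loop with in, out, context, and source2 already in scope; the buffer size and 10 MB reporting interval are illustrative choices, not taken from DistCp.

long totalBytesRead = 0;
long lastReported = 0;
final long reportIntervalBytes = 10L * 1024 * 1024; // hypothetical interval
byte[] buf = new byte[8192];
int n;
while ((n = in.read(buf)) != -1) {
  out.write(buf, 0, n);
  totalBytesRead += n;
  // Refresh the task status periodically rather than on every read.
  if (totalBytesRead - lastReported >= reportIntervalBytes) {
    updateContextStatus(totalBytesRead, context, source2);
    lastReported = totalBytesRead;
  }
}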
@Override
public void setStatus(String status) {
  worker.getContext().setStatus(status);
}
@Override
public void setStatus(String status) {
  workerContext.getContext().setStatus(status);
}
context.setStatus("Written " + i + "/" + recordsToWrite + " records"); context.progress();
output.setStatus("Count " + count);
@Override
protected void map(NullWritable key, NullWritable value, Context context) throws IOException, InterruptedException {
  String suffix = "/" + shortTaskId;
  int BLOCK_SIZE = (int) (recordsToWrite / 100);
  for (long i = 0; i < recordsToWrite;) {
    for (long idx = 0; idx < BLOCK_SIZE && i < recordsToWrite; idx++, i++) {
      int expIdx = rand.nextInt(BLOCK_SIZE) % VISIBILITY_EXPS_COUNT;
      String exp = VISIBILITY_EXPS[expIdx];
      byte[] row = Bytes.add(Bytes.toBytes(i), Bytes.toBytes(suffix), Bytes.toBytes(exp));
      Put p = new Put(row);
      p.addColumn(TEST_FAMILY, TEST_QUALIFIER, HConstants.EMPTY_BYTE_ARRAY);
      p.setCellVisibility(new CellVisibility(exp));
      getCounter(expIdx).increment(1);
      mutator.mutate(p);
      if (i % 100 == 0) {
        context.setStatus("Written " + i + "/" + recordsToWrite + " records");
        context.progress();
      }
    }
    // End of block, flush all of them before we start writing anything
    // pointing to these!
    mutator.flush();
  }
}
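A sketch of where mutator might come from, assuming setup() opens a BufferedMutator through the standard HBase client API; the table name literal is a placeholder, not the table used by the original test.

@Override
protected void setup(Context context) throws IOException {
  // Connection and BufferedMutator are from org.apache.hadoop.hbase.client.
  conn = ConnectionFactory.createConnection(context.getConfiguration());
  mutator = conn.getBufferedMutator(TableName.valueOf("test_visibility"));
}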