Refine search
/**
 * Writes the segment descriptor JSON to {@code descriptorPath}, replacing any
 * stale copy first. Always returns -1 (the size is not tracked here).
 *
 * @throws IOException if the stale descriptor cannot be removed or the write fails
 */
@Override
public long push() throws IOException
{
  try {
    progressable.progress();
    // A leftover descriptor from a failed attempt must be removed before rewriting.
    final boolean hasStaleDescriptor = outputFS.exists(descriptorPath);
    if (hasStaleDescriptor && !outputFS.delete(descriptorPath, false)) {
      throw new IOE("Failed to delete descriptor at [%s]", descriptorPath);
    }
    try (final OutputStream out = outputFS.create(
        descriptorPath,
        true,
        DEFAULT_FS_BUFFER_SIZE,
        progressable
    )) {
      HadoopDruidIndexerConfig.JSON_MAPPER.writeValue(out, segment);
    }
  }
  catch (IOException | RuntimeException ex) {
    // Logged here so each failed attempt of the surrounding retry loop is visible.
    log.info(ex, "Exception in descriptor pusher retry loop");
    throw ex;
  }
  return -1;
}
},
public void cleanupJob(JobContext context) throws IOException { JobConf conf = context.getJobConf(); // do the clean up of temporary directory Path outputPath = FileOutputFormat.getOutputPath(conf); if (outputPath != null) { Path tmpDir = new Path(outputPath, FileOutputCommitter.TEMP_DIR_NAME); FileSystem fileSys = tmpDir.getFileSystem(conf); context.getProgressible().progress(); if (fileSys.exists(tmpDir)) { fileSys.delete(tmpDir, true); } } }
long size = 0L; final byte[] buffer = new byte[1 << 13]; progressable.progress(); try (ZipInputStream in = new ZipInputStream(fileSystem.open(zip, 1 << 13))) { for (ZipEntry entry = in.getNextEntry(); entry != null; entry = in.getNextEntry()) { final String fileName = entry.getName(); progressable.progress(); if (len == 0) { continue; progressable.progress(); return size;
public boolean needsTaskCommit(TaskAttemptContext context) throws IOException { Path taskOutputPath = getTempTaskOutputPath(context); if (taskOutputPath != null) { context.getProgressible().progress(); // Get the file-system for the task output directory FileSystem fs = taskOutputPath.getFileSystem(context.getJobConf()); // since task output path is created on demand, // if it exists, task needs a commit if (fs.exists(taskOutputPath)) { return true; } } return false; }
// Progressable implementation: forward the heartbeat to the wrapped progressable.
@Override public void progress() { progressable.progress(); } };
/**
 * Invoke TaskAttemptContext.progress(). Works with both
 * Hadoop 1 and 2.
 */
public static void progress(TaskAttemptContext context) {
  // TaskAttemptContext implements Progressable in both major Hadoop lines,
  // so routing the call through the interface keeps this version-agnostic.
  final Progressable p = (Progressable) context;
  p.progress();
}
/**
 * Discards the task's temporary output directory when the task attempt is aborted.
 * Failures to delete are logged and swallowed deliberately — abort is best-effort.
 *
 * @param context task attempt context
 */
public void abortTask(TaskAttemptContext context)
{
  Path taskOutputPath = getTempTaskOutputPath(context);
  try {
    if (taskOutputPath != null) {
      FileSystem fs = taskOutputPath.getFileSystem(context.getJobConf());
      // Heartbeat before a potentially slow recursive delete.
      context.getProgressible().progress();
      fs.delete(taskOutputPath, true);
    }
  } catch (IOException ie) {
    // FIX: the original concatenated the message and the stack trace with no
    // separator ("Error discarding output" + trace), producing a fused log line.
    LOG.warn("Error discarding output: " + StringUtils.stringifyException(ie));
  }
}
/**
 * Periodic heartbeat task: optionally traces the event, then reports progress
 * to Hadoop so the surrounding task is not killed for inactivity.
 */
@Override
public void run()
{
  // Guarded so the formatted message is only built when tracing is on.
  if (log != null && log.isTraceEnabled()) {
    final String message = String.format("Heartbeat/progress sent to Hadoop for %s", id);
    log.trace(message);
  }
  progressable.progress();
}
// start the reporter before timing out
/**
 * Streams {@code file} into {@code zipOutputStream} as a new zip entry,
 * reporting progress regularly so long copies do not time out the task.
 *
 * @param file            source file to copy
 * @param zipOutputStream destination zip stream; the entry is closed on return
 * @param progressable    heartbeat hook invoked around each chunk
 * @return total number of bytes copied
 * @throws IOException on any read or write failure
 */
public static long copyFileToZipStream(
    File file,
    ZipOutputStream zipOutputStream,
    Progressable progressable
) throws IOException
{
  createNewZipEntry(zipOutputStream, file);
  long total = 0;
  try (FileInputStream in = new FileInputStream(file)) {
    final byte[] chunk = new byte[0x10000];
    int count;
    while ((count = in.read(chunk)) >= 0) {
      progressable.progress();
      // A zero-byte read is not EOF; just poll again without writing.
      if (count > 0) {
        zipOutputStream.write(chunk, 0, count);
        progressable.progress();
        total += count;
      }
    }
  }
  zipOutputStream.closeEntry();
  progressable.progress();
  return total;
}
/**
 * In-place heapsort of s over [p, r), periodically reporting progress via rep
 * (which may be null). Phase 1 builds the heap level by level, calling
 * rep.progress() after each level; phase 2 repeatedly swaps the root to the
 * end and re-sifts. Indices passed to downHeap are offset by p-1 (1-based
 * heap addressing over the sortable's range).
 * NOTE(review): assumes downHeap(s, base, i, n) sifts element i of an n-sized
 * heap rooted after base — confirm against downHeap's definition, which is
 * outside this view.
 */
@Override public void sort(final IndexedSortable s, final int p, final int r, final Progressable rep) {
  final int N = r - p;
  // build heap w/ reverse comparator, then write in-place from end
  final int t = Integer.highestOneBit(N);
  for (int i = t; i > 1; i >>>= 1) {
    for (int j = i >>> 1; j < i; ++j) {
      downHeap(s, p-1, j, N + 1);
    }
    if (null != rep) {
      rep.progress();
    }
  }
  for (int i = r - 1; i > p; --i) {
    s.swap(p, i);
    downHeap(s, p - 1, 1, i - p + 1);
  }
}
}
/**
 * Compares two SegmentDescriptor entries by the raw bytes of their keys,
 * reporting progress first (comparisons can occur in long-running merges).
 * Returns true when a's key orders strictly before b's.
 */
@Override protected boolean lessThan(Object a, Object b) {
  // indicate we're making progress
  if (progress != null) {
    progress.progress();
  }
  SegmentDescriptor msa = (SegmentDescriptor)a;
  SegmentDescriptor msb = (SegmentDescriptor)b;
  // Byte-level comparison over each key's backing array.
  return comparator.compare(msa.getKey().getData(), 0, msa.getKey().getLength(),
      msb.getKey().getData(), 0, msb.getKey().getLength()) < 0;
}
@Override
// NOTE(review): fragment is truncated in this view — only the progress
// heartbeat at the top of sortInternal is visible; body continues elsewhere.
private static void sortInternal(final IndexedSortable s, int p, int r, final Progressable rep, int depth) { if (null != rep) { rep.progress();
// Progress-event listener callback: forwards each event to the wrapped
// Progressable (when one was supplied) to keep the surrounding task alive.
public void progressChanged(ProgressEvent progressEvent) { if (progress != null) { progress.progress(); } } }
/**
 * Invoke TaskAttemptContext.progress(). Works with both
 * Hadoop 1 and 2.
 */
public static void progress(TaskAttemptContext context) {
  // Both Hadoop 1 and 2 contexts implement Progressable; calling through
  // the interface avoids binding to either version's concrete API.
  ((Progressable) context).progress();
}
// Progressable implementation: forward the heartbeat to the wrapped progress object.
@Override public void progress() { progress.progress(); } }
// Progressable implementation: forward the heartbeat to the wrapped progressable.
@Override public void progress() { progressable.progress(); } }
// Progressable implementation: forward the heartbeat to the wrapped progressable.
@Override public void progress() { progressable.progress(); } };
// Progressable implementation: forward the heartbeat to the wrapped progressable.
@Override public void progress() { progressable.progress(); } };