public ContentSummary build() { // Set it in case applications call QuotaUsage#getFileAndDirectoryCount. super.fileAndDirectoryCount(this.fileCount + this.directoryCount); return new ContentSummary(this); }
@Override public ContentSummary getContentSummary(Path f) throws IOException { // HarFileSystem has a bug where this method does not work properly // if the underlying FS is HDFS. See MAPREDUCE-1877 for more // information. This method is from FileSystem. FileStatus status = getFileStatus(f); if (!status.isDir()) { // f is a file return new ContentSummary(status.getLen(), 1, 0); } // f is a directory long[] summary = {0, 0, 1}; for(FileStatus s : listStatus(f)) { ContentSummary c = s.isDir() ? getContentSummary(s.getPath()) : new ContentSummary(s.getLen(), 1, 0); summary[0] += c.getLength(); summary[1] += c.getFileCount(); summary[2] += c.getDirectoryCount(); } return new ContentSummary(summary[0], summary[1], summary[2]); } }
@Override public ContentSummary getContentSummary(Path p, JobConf job) throws IOException { //length, file count, directory count long[] summary = {0, 0, 0}; List<Path> targetPaths = new ArrayList<Path>(); List<Path> symlinkPaths = new ArrayList<Path>(); try { getTargetPathsFromSymlinksDirs( job, new Path[]{p}, targetPaths, symlinkPaths); } catch (Exception e) { throw new IOException( "Error parsing symlinks from specified job input path.", e); } for(Path path : targetPaths) { FileSystem fs = path.getFileSystem(job); ContentSummary cs = fs.getContentSummary(path); summary[0] += cs.getLength(); summary[1] += cs.getFileCount(); summary[2] += cs.getDirectoryCount(); } return new ContentSummary(summary[0], summary[1], summary[2]); }
@Override public ContentSummary getContentSummary(Path p, JobConf job) throws IOException { //length, file count, directory count long[] summary = {0, 0, 0}; List<Path> targetPaths = new ArrayList<Path>(); List<Path> symlinkPaths = new ArrayList<Path>(); try { getTargetPathsFromSymlinksDirs( job, new Path[]{p}, targetPaths, symlinkPaths); } catch (Exception e) { throw new IOException( "Error parsing symlinks from specified job input path.", e); } for(Path path : targetPaths) { FileSystem fs = path.getFileSystem(job); ContentSummary cs = fs.getContentSummary(path); summary[0] += cs.getLength(); summary[1] += cs.getFileCount(); summary[2] += cs.getDirectoryCount(); } return new ContentSummary(summary[0], summary[1], summary[2]); }
total += estimator.estimate(jobConf, scanOp, -1).getTotalLength(); resultMap.put(pathStr, new ContentSummary(total, -1, -1)); } else { return new ContentSummary(summary[0], summary[1], summary[2]); } finally { if (executor != null) {
total += estimator.estimate(jobConf, scanOp, -1).getTotalLength(); resultMap.put(pathStr, new ContentSummary(total, -1, -1)); } else {
total += estimator.estimate(jobConf, scanOp, -1).getTotalLength(); resultMap.put(pathStr, new ContentSummary(total, -1, -1)); } else { return new ContentSummary(summary[0], summary[1], summary[2]); } finally { HiveInterruptUtils.remove(interrup);
total += estimator.estimate(jobConf, scanOp, -1).getTotalLength(); resultMap.put(pathStr, new ContentSummary(total, -1, -1)); } else {
/** {@inheritDoc} */
@Override
public ContentSummary getContentSummary(Path f) throws IOException {
  A.notNull(f, "f");
  enterBusy();
  try {
    // Fetch the IGFS-native summary and adapt it to Hadoop's ContentSummary.
    final IgfsPathSummary sum = rmtClient.contentSummary(convert(f));
    return new ContentSummary(
        sum.totalLength(),
        sum.filesCount(),
        sum.directoriesCount(),
        -1,                                // quota: none reported
        sum.totalLength(),                 // space consumed
        rmtClient.fsStatus().spaceTotal()  // space quota: total FS space
    );
  } finally {
    leaveBusy();
  }
}
/** Assembles an immutable {@link ContentSummary} from the builder fields. */
public ContentSummary build() {
  return new ContentSummary(
      length, fileCount, directoryCount,
      quota, spaceConsumed, spaceQuota,
      typeConsumed, typeQuota);
}
/** Creates the {@link ContentSummary} described by this builder. */
public ContentSummary build() {
  return new ContentSummary(
      length,
      fileCount,
      directoryCount,
      quota,
      spaceConsumed,
      spaceQuota,
      typeConsumed,
      typeQuota);
}
/**
 * Builds the final {@link ContentSummary}.
 *
 * @return a summary carrying the configured counts, quotas, and usage
 */
public ContentSummary build() {
  return new ContentSummary(
      length, fileCount, directoryCount,
      quota, spaceConsumed, spaceQuota,
      typeConsumed, typeQuota);
}
/** Produces the {@link ContentSummary} from the values set on this builder. */
public ContentSummary build() {
  return new ContentSummary(
      length, fileCount, directoryCount, quota,
      spaceConsumed, spaceQuota, typeConsumed, typeQuota);
}
/** Return the {@link ContentSummary} of a given {@link Path}. */ public ContentSummary getContentSummary(Path f) throws IOException { FileStatus status = getFileStatus(f); if (!status.isDir()) { // f is a file return new ContentSummary(status.getLen(), 1, 0); } // f is a directory long[] summary = {0, 0, 1}; for(FileStatus s : listStatus(f)) { ContentSummary c = s.isDir() ? getContentSummary(s.getPath()) : new ContentSummary(s.getLen(), 1, 0); summary[0] += c.getLength(); summary[1] += c.getFileCount(); summary[2] += c.getDirectoryCount(); } return new ContentSummary(summary[0], summary[1], summary[2]); }
/** Return the {@link ContentSummary} of a given {@link Path}. */ public ContentSummary getContentSummary(Path f) throws IOException { FileStatus status = getFileStatus(f); if (!status.isDir()) { // f is a file return new ContentSummary(status.getLen(), 1, 0); } // f is a directory long[] summary = {0, 0, 1}; for(FileStatus s : listStatus(f)) { ContentSummary c = getContentSummary(s.getPath()); summary[0] += c.getLength(); summary[1] += c.getFileCount(); summary[2] += c.getDirectoryCount(); } return new ContentSummary(summary[0], summary[1], summary[2]); }
@Override public ContentSummary getContentSummary(Path f) throws IOException { // HarFileSystem has a bug where this method does not work properly // if the underlying FS is HDFS. See MAPREDUCE-1877 for more // information. This method is from FileSystem. FileStatus status = getFileStatus(f); if (!status.isDir()) { // f is a file return new ContentSummary(status.getLen(), 1, 0); } // f is a directory long[] summary = {0, 0, 1}; for(FileStatus s : listStatus(f)) { ContentSummary c = s.isDir() ? getContentSummary(s.getPath()) : new ContentSummary(s.getLen(), 1, 0); summary[0] += c.getLength(); summary[1] += c.getFileCount(); summary[2] += c.getDirectoryCount(); } return new ContentSummary(summary[0], summary[1], summary[2]); } }
@Override public ContentSummary getContentSummary(Path f) throws IOException { // HarFileSystem has a bug where this method does not work properly // if the underlying FS is HDFS. See MAPREDUCE-1877 for more // information. This method is from FileSystem. FileStatus status = getFileStatus(f); if (!status.isDir()) { // f is a file return new ContentSummary(status.getLen(), 1, 0); } // f is a directory long[] summary = {0, 0, 1}; for(FileStatus s : listStatus(f)) { ContentSummary c = s.isDir() ? getContentSummary(s.getPath()) : new ContentSummary(s.getLen(), 1, 0); summary[0] += c.getLength(); summary[1] += c.getFileCount(); summary[2] += c.getDirectoryCount(); } return new ContentSummary(summary[0], summary[1], summary[2]); } }
@Override public ContentSummary getContentSummary(Path f) throws IOException { // HarFileSystem has a bug where this method does not work properly // if the underlying FS is HDFS. See MAPREDUCE-1877 for more // information. This method is from FileSystem. FileStatus status = getFileStatus(f); if (!status.isDir()) { // f is a file return new ContentSummary(status.getLen(), 1, 0); } // f is a directory long[] summary = {0, 0, 1}; for(FileStatus s : listStatus(f)) { ContentSummary c = s.isDir() ? getContentSummary(s.getPath()) : new ContentSummary(s.getLen(), 1, 0); summary[0] += c.getLength(); summary[1] += c.getFileCount(); summary[2] += c.getDirectoryCount(); } return new ContentSummary(summary[0], summary[1], summary[2]); }
/**
 * Computes this node's content summary.
 *
 * @param summary presumably the accumulator array the totals are folded
 *                into — TODO confirm against implementations
 * @return an array of four longs:
 *         0: length, 1: file count, 2: directory count, 3: disk space
 */
abstract long[] computeContentSummary(long[] summary);
/**
 * Computes this node's content summary.
 *
 * @param summary presumably the accumulator array the totals are folded
 *                into — TODO confirm against implementations
 * @return an array of four longs:
 *         0: length, 1: file count, 2: directory count, 3: disk space
 */
abstract long[] computeContentSummary(long[] summary);