/**
 * Deletes {@code path} on {@code fs} only when it is currently present; a no-op otherwise.
 * Thin existence guard around {@link FileSystem#delete(Path, boolean)} (via {@code deletePath}).
 *
 * @param fs the {@link FileSystem} to operate on
 * @param path the {@link Path} to delete if it exists
 * @param recursive whether to delete directory contents recursively
 * @throws IOException if the existence check or the delete fails
 */
public static void deleteIfExists(FileSystem fs, Path path, boolean recursive) throws IOException {
  if (!fs.exists(path)) {
    return;
  }
  deletePath(fs, path, recursive);
}
/**
 * Deletes {@code f} and then walks up its ancestor chain, removing each ancestor
 * directory that is left empty. Stops at the first ancestor that is missing or non-empty.
 *
 * @param fs the {@link FileSystem} to operate on
 * @param f the {@link Path} to delete
 * @param recursive whether the initial delete of {@code f} is recursive
 * @throws IOException if any delete or listing fails
 */
public static void deletePathAndEmptyAncestors(FileSystem fs, Path f, boolean recursive) throws IOException {
  deletePath(fs, f, recursive);
  for (Path ancestor = f.getParent(); ancestor != null; ancestor = ancestor.getParent()) {
    // Only remove an ancestor if it exists and contains no entries at all.
    boolean removableEmptyDir = fs.exists(ancestor) && fs.listStatus(ancestor).length == 0;
    if (!removableEmptyDir) {
      break;
    }
    deletePath(fs, ancestor, true);
  }
}
// Callable submitted to ParallelRunner that deletes a single path while holding a
// per-path lock, so concurrent delete tasks targeting the same path are serialized.
@Override public Void call() throws Exception {
  // Lock is keyed by the path's string form; presumably `locks` is a loading/striped
  // map that returns the same Lock for equal keys — TODO confirm.
  Lock lock = ParallelRunner.this.locks.get(path.toString());
  lock.lock();
  try {
    HadoopUtils.deletePath(ParallelRunner.this.fs, path, recursive);
    return null;
  } finally {
    // Always release the lock, even if deletePath throws.
    lock.unlock();
  }
}
}), "Delete path " + path));
/**
 * Writes the string form of an Avro {@link Schema} to {@code filePath} on {@code fs}
 * and then applies {@code perm} to the written file.
 *
 * @param schema the schema whose {@code toString()} output is written
 * @param filePath the destination file
 * @param fs the {@link FileSystem} to write to
 * @param overwrite if {@code true}, an existing file at {@code filePath} is deleted first;
 *        if {@code false}, an existing file causes an {@link IllegalStateException}
 * @param perm the permission to set on the written file
 * @throws IOException if any file system operation fails
 */
public static void writeSchemaToFile(Schema schema, Path filePath, FileSystem fs, boolean overwrite, FsPermission perm)
    throws IOException {
  if (overwrite) {
    HadoopUtils.deletePath(fs, filePath, true);
  } else {
    Preconditions.checkState(!fs.exists(filePath), filePath + " already exists");
  }
  String schemaText = schema.toString();
  try (DataOutputStream dos = fs.create(filePath)) {
    // writeChars emits UTF-16 code units (two bytes per char), matching existing readers.
    dos.writeChars(schemaText);
  }
  // Permission is applied after the stream is closed so it covers the final file.
  fs.setPermission(filePath, perm);
}
/**
 * Deletes the origin file of the {@link CopyableFile} carried by {@code state} from the
 * source file system, along with its companion ".ready" marker file. Entities that are
 * not {@link CopyableFile}s are ignored.
 *
 * @param state the work unit state holding a serialized {@link CopyEntity}
 * @throws IOException if a delete fails
 */
private void deleteFilesOnSource(WorkUnitState state) throws IOException {
  CopyEntity entity = CopySource.deserializeCopyEntity(state);
  if (!(entity instanceof CopyableFile)) {
    return;
  }
  Path originPath = ((CopyableFile) entity).getOrigin().getPath();
  HadoopUtils.deletePath(this.sourceFs, originPath, true);
  // Also remove the marker file that signalled this file was ready to copy.
  Path readyMarker = PathUtils.addExtension(originPath, ReadyCopyableFileFilter.READY_EXTENSION);
  HadoopUtils.deletePath(this.sourceFs, readyMarker, true);
} }
// Callable submitted to ParallelRunner that deserializes all state records from one
// SequenceFile into `states`, optionally deleting the file afterwards.
@Override public Void call() throws Exception {
  // Copy the FS configuration so the shim registration below does not mutate shared config.
  Configuration conf = new Configuration(ParallelRunner.this.fs.getConf());
  WritableShimSerialization.addToHadoopConfiguration(conf);
  // This Reader constructor is deprecated but retained here; suppressed deliberately.
  try (@SuppressWarnings("deprecation") SequenceFile.Reader reader = new SequenceFile.Reader(
      ParallelRunner.this.fs, inputFilePath, conf)) {
    Writable key = keyClass.newInstance();
    T state = stateClass.newInstance();
    while (reader.next(key)) {
      // getCurrentValue may deserialize into the passed instance or return a new one,
      // so a fresh instance is created per iteration to avoid aliasing entries in `states`.
      state = (T) reader.getCurrentValue(state);
      states.add(state);
      state = stateClass.newInstance();
    }
    if (deleteAfter) {
      // Non-recursive delete: inputFilePath is expected to be a single file.
      HadoopUtils.deletePath(ParallelRunner.this.fs, inputFilePath, false);
    }
  }
  return null;
}
}), "Deserialize state from file " + inputFilePath));
// Remove the job's staging and output locations, including each path's parent directory.
// NOTE(review): deleting the parents assumes those directories are job-scoped and hold
// nothing shared with other jobs — confirm against how these paths are constructed.
HadoopUtils.deletePath(fs, jobStagingPath, true);
HadoopUtils.deletePath(fs, jobStagingPath.getParent(), true);
HadoopUtils.deletePath(fs, jobOutputPath, true);
HadoopUtils.deletePath(fs, jobOutputPath.getParent(), true);
// When overwriting, clear any existing destination before moving the temp file into place.
if (overwriteDst && dstFs.exists(dst)) {
  try {
    deletePath(dstFs, dst, true);
  } finally {
    // NOTE(review): the rename sits in a finally block, so it is attempted even when the
    // delete above throws — confirm this best-effort ordering is intentional, since a
    // failed delete would make this rename land on a still-occupied destination.
    renamePath(dstFs, tmp, dst);
    // Clean up the temp file once (or regardless of whether) the rename has run.
    deletePath(dstFs, tmp, true);