/**
 * Equality is based solely on the {@code file} field; only instances of the
 * exact same runtime class can compare equal.
 */
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    if (o == null || getClass() != o.getClass()) {
        return false;
    }
    AbstractFileReader that = (AbstractFileReader) o;
    // Objects.equals replaces the original double-negated ternary null check,
    // which was equivalent but much harder to read.
    return java.util.Objects.equals(file, that.file);
}
/**
 * A new bucket is started whenever the formatted current time no longer maps
 * onto the bucket path currently in use.
 */
@Override
public boolean shouldStartNewBucket(Path basePath, Path currentBucketPath) {
    Date now = new Date(clock.currentTimeMillis());
    Path expectedBucketPath = new Path(basePath, dateFormatter.format(now));
    return !expectedBucketPath.equals(currentBucketPath);
}
/**
 * Creates a handle describing where table data is staged versus where it
 * finally lives.
 *
 * @param targetPath      final location of the data
 * @param writePath       location data is written to first
 * @param isExistingTable whether the target table already exists
 * @param writeMode       how writePath relates to targetPath
 * @throws IllegalArgumentException if writeMode requires the write path to be
 *         the same as the target path but the two differ
 */
public LocationHandle(Path targetPath, Path writePath, boolean isExistingTable, WriteMode writeMode) {
    // Null-check writeMode before dereferencing it; the original called
    // writeMode.isWritePathSameAsTargetPath() first, so a null writeMode
    // produced an anonymous NPE instead of the intended message.
    requireNonNull(targetPath, "targetPath is null");
    requireNonNull(writePath, "writePath is null");
    requireNonNull(writeMode, "writeMode is null");
    if (writeMode.isWritePathSameAsTargetPath() && !targetPath.equals(writePath)) {
        throw new IllegalArgumentException(format("targetPath is expected to be same as writePath for writeMode %s", writeMode));
    }
    this.targetPath = targetPath;
    this.writePath = writePath;
    this.isExistingTable = isExistingTable;
    this.writeMode = writeMode;
}
/** Two MockFile instances are equal when both their path and length match. */
@Override
public boolean equals(final Object obj) {
    if (!(obj instanceof MockFile)) {
        return false;
    }
    final MockFile other = (MockFile) obj;
    return this.path.equals(other.path) && this.length == other.length;
}
/**
 * Walks from {@code path} up through its ancestors looking for {@code subtree}.
 * Bails out early once the candidate is shallower than the subtree, since no
 * further ancestor can match.
 */
private static boolean isPathWithinSubtree(Path path, Path subtree, int subtreeDepth) {
    for (Path current = path; current != null; current = current.getParent()) {
        if (subtreeDepth > current.depth()) {
            return false;
        }
        if (subtree.equals(current)) {
            return true;
        }
    }
    return false;
}
/** Equality for MockFile: same path and same length. */
@Override
public boolean equals(final Object obj) {
    if (!(obj instanceof MockFile)) {
        return false;
    }
    final MockFile that = (MockFile) obj;
    return this.length == that.length && this.path.equals(that.path);
}
/**
 * Returns true when {@code parent} equals {@code child} or is an ancestor of
 * it. The child is climbed up to the parent's depth before comparing.
 */
private static boolean isSameOrParent(Path parent, Path child) {
    final int parentDepth = parent.depth();
    if (parentDepth > child.depth()) {
        return false;
    }
    Path ancestor = child;
    while (ancestor.depth() > parentDepth) {
        ancestor = ancestor.getParent();
    }
    return parent.equals(ancestor);
}
/**
 * Returns true when {@code childDirectory} equals {@code parentDirectory} or
 * lies anywhere beneath it. Implemented by walking the child upward toward
 * the root instead of recursing.
 */
private static boolean isChildDirectory(Path parentDirectory, Path childDirectory) {
    Path current = childDirectory;
    while (true) {
        if (parentDirectory.equals(current)) {
            return true;
        }
        if (current.isRoot()) {
            return false;
        }
        current = current.getParent();
    }
}
}
/**
 * Validates that the configured WAL directory is not nested under the HBase
 * root directory. Returns true when valid; never returns false — an invalid
 * location throws instead.
 *
 * @param walDir the WAL directory to validate
 * @param c      cluster configuration used to resolve the root dir and filesystem
 * @return true when the WAL directory is acceptable
 * @throws IOException on filesystem access errors
 * @throws IllegalStateException if walDir resolves to a path under the root directory
 */
private static boolean isValidWALRootDir(Path walDir, final Configuration c) throws IOException {
    Path rootDir = getRootDir(c);
    FileSystem fs = walDir.getFileSystem(c);
    // Qualify walDir with the filesystem's URI and working directory so it is
    // comparable to rootDir.
    Path qualifiedWalDir = walDir.makeQualified(fs.getUri(), fs.getWorkingDirectory());
    // walDir equal to rootDir is allowed; only strict nesting is rejected.
    if (!qualifiedWalDir.equals(rootDir)) {
        // NOTE(review): containment is tested by string prefix; this assumes
        // rootDir is already in the same qualified form — confirm against
        // getRootDir(c) before changing.
        if (qualifiedWalDir.toString().startsWith(rootDir.toString() + "/")) {
            throw new IllegalStateException("Illegal WAL directory specified. " + "WAL directories are not permitted to be under the root directory if set.");
        }
    }
    return true;
}
/**
 * Resolves the encryption zone for {@code path}. When the path does not exist,
 * recurses toward the nearest existing ancestor and returns its zone.
 *
 * @return the encryption zone, or null when no existing ancestor is found
 * @throws IOException on filesystem errors
 */
private EncryptionZone getEncryptionZoneForPath(Path path) throws IOException {
    if (path.getFileSystem(conf).exists(path)) {
        return hdfsAdmin.getEncryptionZoneForPath(path);
    }
    // Guard against getParent() returning null at the root: the original
    // wrote path.getParent().equals(path), which throws an NPE when a
    // non-existent root path is reached.
    Path parent = path.getParent();
    if (parent != null && !parent.equals(path)) {
        return getEncryptionZoneForPath(parent);
    }
    return null;
}
/**
 * Determines if the given workingDir is the default working snapshot
 * directory itself or a subdirectory of it.
 *
 * @param workingDir a directory to check
 * @param conf configuration for the HBase cluster
 * @return true if workingDir is (or is under) the default working directory
 *         for snapshots, false otherwise
 */
public static boolean isWithinDefaultWorkingDir(final Path workingDir, Configuration conf) {
    final Path defaultWorkingDir = getDefaultWorkingSnapshotDir(new Path(conf.get(HConstants.HBASE_DIR)));
    if (workingDir.equals(defaultWorkingDir)) {
        return true;
    }
    return isSubDirectoryOf(workingDir, defaultWorkingDir);
}
private void handleFileNotFound(Path path, FileNotFoundException fnfe) throws IOException { // If the log was archived, continue reading from there Path archivedLog = getArchivedLog(path); if (!path.equals(archivedLog)) { openReader(archivedLog); } else { throw fnfe; } }
/**
 * Returns true when {@code filePath} matches the path of any status in
 * {@code files}.
 */
private static boolean contains(Iterable<FileStatus> files, Path filePath) {
    for (FileStatus status : files) {
        // Guard the debug line so the string concatenation (and Path.toString
        // calls) are skipped entirely when debug logging is disabled.
        if (LOG.isDebugEnabled()) {
            LOG.debug("debug in contains, 3.1: " + status.getPath() + " filePath:" + filePath);
        }
        if (filePath.equals(status.getPath())) {
            return true;
        }
    }
    return false;
}
/**
 * Prepares an insert into an existing table: schedules asynchronous renames
 * of the staged files into the table's target location (when the staging and
 * target locations differ) and queues a statistics update for the table.
 *
 * Side effects: clears {@code deleteOnly} and appends to
 * {@code cleanUpTasksForAbort}, {@code fileRenameFutures} (via asyncRename)
 * and {@code updateStatisticsOperations}.
 */
private void prepareInsertExistingTable(HdfsContext context, TableAndMore tableAndMore) {
    deleteOnly = false;
    Table table = tableAndMore.getTable();
    Path targetPath = new Path(table.getStorage().getLocation());
    Path currentPath = tableAndMore.getCurrentLocation().get();
    // Register the cleanup task before starting renames so an abort can remove
    // whatever made it into the target directory.
    cleanUpTasksForAbort.add(new DirectoryCleanUpTask(context, targetPath, false));
    if (!targetPath.equals(currentPath)) {
        asyncRename(hdfsEnvironment, renameExecutor, fileRenameCancelled, fileRenameFutures, context, currentPath, targetPath, tableAndMore.getFileNames().get());
    }
    // Optional.empty(): table-level statistics, no partition name.
    // NOTE(review): the trailing boolean presumably means "merge with existing
    // statistics" — confirm against UpdateStatisticsOperation.
    updateStatisticsOperations.add(new UpdateStatisticsOperation(new SchemaTableName(table.getDatabaseName(), table.getTableName()), Optional.empty(), tableAndMore.getStatisticsUpdate(), true));
}
private static void updatePathForMapWork(Path newPath, MapWork work, Path path) { // update the work if (!newPath.equals(path)) { PartitionDesc partDesc = work.getPathToPartitionInfo().get(path); work.addPathToAlias(newPath, work.getPathToAliases().get(path)); work.removePathToAlias(path); work.removePathToPartitionInfo(path); work.addPathToPartitionInfo(newPath, partDesc); } }
/**
 * Prepares an insert into an existing partition: schedules asynchronous
 * renames of the staged files into the partition's target location (when the
 * staging and target locations differ) and queues a statistics update keyed
 * by the partition name.
 *
 * Side effects: clears {@code deleteOnly} and appends to
 * {@code cleanUpTasksForAbort}, {@code fileRenameFutures} (via asyncRename)
 * and {@code updateStatisticsOperations}.
 */
private void prepareInsertExistingPartition(HdfsContext context, PartitionAndMore partitionAndMore) {
    deleteOnly = false;
    Partition partition = partitionAndMore.getPartition();
    Path targetPath = new Path(partition.getStorage().getLocation());
    Path currentPath = partitionAndMore.getCurrentLocation();
    // Register the cleanup task before starting renames so an abort can remove
    // whatever made it into the target directory.
    cleanUpTasksForAbort.add(new DirectoryCleanUpTask(context, targetPath, false));
    if (!targetPath.equals(currentPath)) {
        asyncRename(hdfsEnvironment, renameExecutor, fileRenameCancelled, fileRenameFutures, context, currentPath, targetPath, partitionAndMore.getFileNames());
    }
    // NOTE(review): the trailing boolean presumably means "merge with existing
    // statistics" — confirm against UpdateStatisticsOperation.
    updateStatisticsOperations.add(new UpdateStatisticsOperation(new SchemaTableName(partition.getDatabaseName(), partition.getTableName()), Optional.of(getPartitionName(partition.getDatabaseName(), partition.getTableName(), partition.getValues())), partitionAndMore.getStatisticsUpdate(), true));
}
/**
 * Test hook: simulates a concurrent writer creating a conflicting partition
 * directory under the insert's target path. When the write path equals the
 * target path the simulated conflict cannot occur, so a rollback is triggered
 * immediately instead.
 *
 * Side effects: assigns the {@code path} and {@code context} fields and
 * creates a directory on the filesystem.
 */
@Override
public void triggerConflict(ConnectorSession session, SchemaTableName tableName, ConnectorInsertTableHandle insertTableHandle, List<PartitionUpdate> partitionUpdates) {
    Path writePath = getStagingPathRoot(insertTableHandle);
    Path targetPath = getTargetPathRoot(insertTableHandle);
    if (writePath.equals(targetPath)) {
        // This conflict does not apply. Trigger a rollback right away so that this test case passes.
        throw new TestingRollbackException();
    }
    // Create a conflicting partition directory directly under the target path.
    path = new Path(targetPath + "/pk1=b/pk2=add2");
    context = new HdfsContext(session, tableName.getSchemaName(), tableName.getTableName());
    createDirectory(context, hdfsEnvironment, path);
}
/**
 * Equality for FsWithHash: both wrap null, or both wrap splits with identical
 * start, length and path.
 */
@Override
public boolean equals(Object obj) {
    if (this == obj) {
        return true;
    }
    if (!(obj instanceof FsWithHash)) {
        return false;
    }
    FsWithHash that = (FsWithHash) obj;
    if (fs == null || that.fs == null) {
        // Equal only when both sides are null.
        return fs == null && that.fs == null;
    }
    return fs.getStart() == that.fs.getStart()
            && fs.getLength() == that.fs.getLength()
            && fs.getPath().equals(that.fs.getPath());
}
}