/**
 * Looks up a file system abstraction using the configuration provided.
 * <p>
 * This default implementation ignores {@code uri} and {@code user} and simply
 * delegates to {@link #getFileSystem(Configuration)}; overriding
 * implementations may honour them.
 *
 * @param uri  URI of the filesystem (ignored by this default implementation)
 * @param conf configuration properties
 * @param user user to run as; {@code null} means the current user
 * @return a file system abstraction configured with the properties found in {@code conf}
 * @throws IOException          error looking up/creating the file system
 * @throws InterruptedException declared for overriding implementations; never
 *                              thrown by this default implementation
 */
default FileSystem getFileSystem( URI uri, Configuration conf, String user )
  throws IOException, InterruptedException {
  return getFileSystem( conf );
}
/**
 * Resolves a file system for the given configuration by forwarding the
 * request to the wrapped delegate.
 *
 * @param conf configuration properties used to locate the file system
 * @return the file system produced by the delegate
 * @throws IOException if the delegate fails to look up or create the file system
 */
@Override
public FileSystem getFileSystem( Configuration conf ) throws IOException {
  final FileSystem resolved = delegate.getFileSystem( conf );
  return resolved;
}
/**
 * Resolves a file system for the given URI, configuration and user by
 * forwarding the request to the wrapped delegate.
 *
 * @param uri  URI of the filesystem
 * @param conf configuration properties used to locate the file system
 * @param user user to run as; {@code null} means the current user
 * @return the file system produced by the delegate
 * @throws IOException          if the delegate fails to look up or create the file system
 * @throws InterruptedException if the delegate's lookup is interrupted
 */
@Override
public FileSystem getFileSystem( URI uri, Configuration conf, String user )
  throws IOException, InterruptedException {
  final FileSystem resolved = delegate.getFileSystem( uri, conf, user );
  return resolved;
}
/**
 * Lazily resolves the underlying Hadoop file system for the captured URI and
 * configuration, running as the current user ({@code null} user argument).
 * <p>
 * Returns {@code null} on failure instead of throwing — callers are expected
 * to handle an absent file system; the error is logged at debug level.
 * Fix: the two identical catch blocks are merged into a multi-catch, and the
 * thread's interrupt status is restored when an {@link InterruptedException}
 * is swallowed, so callers up the stack can still observe the interruption.
 */
@Override
public FileSystem getFileSystem() {
  try {
    return (FileSystem) hadoopShim.getFileSystem( finalUri, configuration, null ).getDelegate();
  } catch ( IOException | InterruptedException e ) {
    if ( e instanceof InterruptedException ) {
      // Never swallow an interrupt silently — re-assert it for the caller.
      Thread.currentThread().interrupt();
    }
    LOGGER.debug( "Error looking up/creating the file system ", e );
    return null;
  }
} } );
/**
 * Creates a {@link HadoopFileSystem} backed by the shim resolved from the
 * Hadoop configuration.
 * <p>
 * Fails fast with an {@link IOException} when the shim resolves to a local
 * filesystem, since an HDFS connection is expected. The returned wrapper
 * resolves its file system lazily via a callable; on lookup failure the
 * callable logs at debug level and yields {@code null}.
 * Fix: the nested callable's duplicated catch blocks are merged into a
 * multi-catch and the interrupt status is restored when an
 * {@link InterruptedException} is swallowed.
 *
 * @param namedCluster cluster descriptor (not consulted by this implementation — TODO confirm intent)
 * @param uri          target filesystem URI; {@code null} is treated as an empty URI
 * @return a lazily-connecting Hadoop file system wrapper
 * @throws IOException if the shim resolves to a local filesystem instead of HDFS
 */
@Override
public HadoopFileSystem create( NamedCluster namedCluster, URI uri ) throws IOException {
  final URI finalUri = uri != null ? uri : URI.create( "" );
  final HadoopShim hadoopShim = hadoopConfiguration.getHadoopShim();
  final Configuration configuration = hadoopShim.createConfiguration();
  FileSystem fileSystem = (FileSystem) hadoopShim.getFileSystem( configuration ).getDelegate();
  if ( fileSystem instanceof LocalFileSystem ) {
    // Single source of truth for the message logged and thrown below.
    String message = "Got a local filesystem, was expecting an hdfs connection";
    LOGGER.error( message );
    throw new IOException( message );
  }
  return new HadoopFileSystemImpl( new HadoopFileSystemCallable() {
    @Override
    public FileSystem getFileSystem() {
      try {
        return (FileSystem) hadoopShim.getFileSystem( finalUri, configuration, null ).getDelegate();
      } catch ( IOException | InterruptedException e ) {
        if ( e instanceof InterruptedException ) {
          // Never swallow an interrupt silently — re-assert it for the caller.
          Thread.currentThread().interrupt();
        }
        LOGGER.debug( "Error looking up/creating the file system ", e );
        return null;
      }
    }
  } );
}
}
// NOTE(review): this method continues beyond the visible chunk — only its setup
// portion is shown here; the uses of `fs` and `loader` are out of view.
/**
 * Begins job configuration: resolves the file system for {@code conf} and
 * builds a class loader that can see the resolved jar in addition to the
 * shim's own classes (shim's class loader is the parent).
 *
 * @param conf configuration used to resolve the file system
 * @throws Exception propagated from file system lookup or later configuration steps
 */
protected void configure( Configuration conf ) throws Exception {
  FileSystem fs = hadoopShim.getFileSystem( conf );
  URL[] urls = new URL[] { resolvedJarUrl };
  URLClassLoader loader = new URLClassLoader( urls, hadoopShim.getClass().getClassLoader() );
@VisibleForTesting void cleanOutputPath( Configuration conf ) throws IOException { if ( cleanOutputPath ) { FileSystem fs = hadoopShim.getFileSystem( conf ); Path path = getOutputPath( conf, fs ); String outputPath = path.toUri().toString(); if ( log.isBasic() ) { log.logBasic( BaseMessages.getString( PKG, JOB_ENTRY_HADOOP_TRANS_JOB_EXECUTOR_CLEANING_OUTPUT_PATH, outputPath ) ); } try { if ( !fs.exists( path ) ) { // If the path does not exist one could think of it as "already cleaned" return; } if ( !fs.delete( path, true ) ) { if ( log.isBasic() ) { log.logBasic( BaseMessages .getString( PKG, JOB_ENTRY_HADOOP_TRANS_JOB_EXECUTOR_FAILED_TO_CLEAN_OUTPUT_PATH, outputPath ) ); } } } catch ( IOException ex ) { throw new IOException( BaseMessages.getString( PKG, JOB_ENTRY_HADOOP_TRANS_JOB_EXECUTOR_ERROR_CLEANING_OUTPUT_PATH, outputPath ), ex ); } } }
// NOTE(review): fragment of a larger method — the enclosing definition is not
// visible in this chunk.
// Remove any stale output from a previous run, then resolve the target file system.
cleanOutputPath( conf );
FileSystem fs = hadoopShim.getFileSystem( conf );