Refine search
private CompressionCodec createCompressionCodec(String codecName) { try { Class<? extends CompressionCodec> codecClass = classLoader.loadClass(codecName).asSubclass(CompressionCodec.class); Constructor<? extends CompressionCodec> constructor = codecClass.getDeclaredConstructor(); constructor.setAccessible(true); CompressionCodec codec = constructor.newInstance(); if (codec instanceof Configurable) { // Hadoop is crazy... you have to give codecs an empty configuration or they throw NPEs // but you need to make sure the configuration doesn't "load" defaults or it spends // forever loading XML with no useful information ((Configurable) codec).setConf(new Configuration(false)); } return codec; } catch (ReflectiveOperationException e) { throw new IllegalArgumentException("Unknown codec: " + codecName, e); } } }
/**
 * Creates a compression stream without any additional wrapping into
 * buffering streams.
 *
 * @param downStream stream the compressed bytes are written to
 * @param compressor compressor instance to drive the codec
 * @return the raw (unbuffered) compression stream
 * @throws IOException if the codec fails to create the stream
 */
public CompressionOutputStream createPlainCompressionStream(
        OutputStream downStream, Compressor compressor) throws IOException {
    CompressionCodec codec = getCodec(conf);
    // Pin the codec's internal buffer at 32 KB before creating the stream.
    Configuration codecConf = ((Configurable) codec).getConf();
    codecConf.setInt("io.file.buffer.size", 32 * 1024);
    return codec.createOutputStream(downStream, compressor);
}
private static void initPolicy(VolumeChoosingPolicy<FsVolumeSpi> policy, float preferencePercent) { Configuration conf = new Configuration(); // Set the threshold to consider volumes imbalanced to 1MB conf.setLong( DFS_DATANODE_AVAILABLE_SPACE_VOLUME_CHOOSING_POLICY_BALANCED_SPACE_THRESHOLD_KEY, 1024 * 1024); // 1MB conf.setFloat( DFS_DATANODE_AVAILABLE_SPACE_VOLUME_CHOOSING_POLICY_BALANCED_SPACE_PREFERENCE_FRACTION_KEY, preferencePercent); ((Configurable) policy).setConf(conf); }
/**
 * Resolves the S3 encryption materials provider from the Hadoop configuration.
 * A configured KMS key id wins; otherwise a provider class name, if present,
 * is instantiated reflectively (and configured when {@code Configurable}).
 *
 * @param hadoopConfig configuration holding the S3 encryption settings
 * @return the provider, or empty when no encryption is configured
 * @throws RuntimeException if the configured class is invalid or cannot be created
 */
private static Optional<EncryptionMaterialsProvider> createEncryptionMaterialsProvider(
        Configuration hadoopConfig) {
    String kmsKeyId = hadoopConfig.get(S3_KMS_KEY_ID);
    if (kmsKeyId != null) {
        return Optional.of(new KMSEncryptionMaterialsProvider(kmsKeyId));
    }

    String providerClassName = hadoopConfig.get(S3_ENCRYPTION_MATERIALS_PROVIDER);
    if (providerClassName == null) {
        return Optional.empty();
    }

    try {
        Object raw = Class.forName(providerClassName).getConstructor().newInstance();
        if (!(raw instanceof EncryptionMaterialsProvider)) {
            throw new RuntimeException("Invalid encryption materials provider class: " + raw.getClass().getName());
        }
        EncryptionMaterialsProvider provider = (EncryptionMaterialsProvider) raw;
        if (provider instanceof Configurable) {
            ((Configurable) provider).setConf(hadoopConfig);
        }
        return Optional.of(provider);
    } catch (ReflectiveOperationException e) {
        throw new RuntimeException("Unable to load or create S3 encryption materials provider: " + providerClassName, e);
    }
}
@Override public void setConf(Configuration conf) { if (this.tool != null && Configurable.class.isAssignableFrom(this.tool.getClass())) { ((Configurable)this.tool).setConf(conf); } if (this.context != null) { // Put the ID into the context in case a job needs it. conf.set(ToolContextBase.CONF_KEY_ID, this.context.getId()); if (this.context.getAnnotatedBean() != null && Configurable.class.isAssignableFrom(this.context.getAnnotatedBean().getClass())) { ((Configurable)this.context.getAnnotatedBean()).setConf(conf); } } super.setConf(conf); }
@Override public void configure(Configuration parameters) { // enforce sequential configure() calls synchronized (CONFIGURE_MUTEX) { if (this.mapreduceOutputFormat instanceof Configurable) { ((Configurable) this.mapreduceOutputFormat).setConf(this.configuration); } } }
/**
 * Builds a record reader for one path of a combined split by delegating to the
 * input format class registered under {@code INDIVIDUAL_INPUT_FORMAT}.
 *
 * @param split    combined split holding the individual paths
 * @param conf     job configuration (must be a {@code JobConf})
 * @param reporter progress reporter passed to the delegate reader
 * @param idx      index of the path within the combined split
 * @throws Exception if the input format cannot be instantiated or the reader created
 */
public CombineFileRecordReaderWrapper(CombineFileSplit split, Configuration conf,
        Reporter reporter, Integer idx) throws Exception {
    FileSplit fileSplit = new FileSplit(split.getPath(idx), split.getOffset(idx),
            split.getLength(idx), split.getLocations());

    Class<?> clz = conf.getClass(INDIVIDUAL_INPUT_FORMAT, null);
    // getDeclaredConstructor().newInstance() replaces the deprecated
    // Class.newInstance(), which silently propagates undeclared checked exceptions.
    FileInputFormat<K, V> inputFormat =
            (FileInputFormat<K, V>) clz.getDeclaredConstructor().newInstance();
    if (inputFormat instanceof Configurable) {
        ((Configurable) inputFormat).setConf(conf);
    }
    delegate = inputFormat.getRecordReader(fileSplit, (JobConf) conf, reporter);
}
// NOTE(review): truncated search-result snippet — not a complete method. As shown,
// the catch block is broken: on ClassNotFoundException it calls setConf on `codec`,
// which was never assigned (guaranteed NPE). Presumably the real source handles the
// lookup failure differently — TODO: confirm against the original file before reuse.
try { Class<? extends CompressionCodec> codecClass = conf.getClassByName(codecClassname).asSubclass(CompressionCodec.class); this.codec = ReflectionUtils.newInstance(codecClass, conf); } catch (ClassNotFoundException cnfe) { ((Configurable)codec).setConf(conf);
/**
 * Returns the configuration of the wrapped input format, which is assumed
 * to implement {@link Configurable}.
 */
@Override
public Configuration getConf() {
    Configurable configurable = (Configurable) inputFormat;
    return configurable.getConf();
}
@Override public void configure(Configuration parameters) { // enforce sequential configuration() calls synchronized (CONFIGURE_MUTEX) { if (mapreduceInputFormat instanceof Configurable) { ((Configurable) mapreduceInputFormat).setConf(configuration); } } }
/**
 * Creates the delegate record reader for the {@code idx}-th path of a combined
 * split, using the input format class configured under {@code INDIVIDUAL_INPUT_FORMAT}.
 *
 * @param split    combined split holding the individual paths
 * @param conf     job configuration (must be a {@code JobConf})
 * @param reporter progress reporter passed to the delegate reader
 * @param idx      index of the path within the combined split
 * @throws Exception if the input format cannot be instantiated or the reader created
 */
public CombineFileRecordReaderWrapper(CombineFileSplit split, Configuration conf,
        Reporter reporter, Integer idx) throws Exception {
    FileSplit fileSplit = new FileSplit(split.getPath(idx), split.getOffset(idx),
            split.getLength(idx), split.getLocations());

    Class<?> clz = conf.getClass(INDIVIDUAL_INPUT_FORMAT, null);
    // Class.newInstance() is deprecated (Java 9+) and propagates undeclared
    // checked exceptions; go through the no-arg constructor explicitly.
    FileInputFormat<K, V> inputFormat =
            (FileInputFormat<K, V>) clz.getDeclaredConstructor().newInstance();
    if (inputFormat instanceof Configurable) {
        ((Configurable) inputFormat).setConf(conf);
    }
    delegate = inputFormat.getRecordReader(fileSplit, (JobConf) conf, reporter);
}
/**
 * Returns the wrapped expression's configuration, or {@code null} when the
 * expression is not {@link Configurable}.
 */
@Override
public Configuration getConf() {
    return (expression instanceof Configurable)
            ? ((Configurable) expression).getConf()
            : null;
}
// NOTE(review): truncated search-result snippet — not a complete method. The last
// statement references `codec`, which is never declared in what is visible here;
// presumably the real source selects a codec from `codecs` by `codecName` before
// configuring it. TODO: confirm against the original file before reuse.
@VisibleForTesting static CompressionCodec getCodec(String codecName) { Configuration conf = new Configuration(); List<Class<? extends CompressionCodec>> codecs = CompressionCodecFactory.getCodecClasses(conf); ((org.apache.hadoop.conf.Configurable) codec).setConf(conf);
public InputStream createDecompressionStream( InputStream downStream, Decompressor decompressor, int downStreamBufferSize) throws IOException { CompressionCodec codec = getCodec(conf); // Set the internal buffer size to read from down stream. if (downStreamBufferSize > 0) { ((Configurable)codec).getConf().setInt("io.file.buffer.size", downStreamBufferSize); } CompressionInputStream cis = codec.createInputStream(downStream, decompressor); BufferedInputStream bis2 = new BufferedInputStream(cis, DATA_IBUF_SIZE); return bis2; }
/**
 * Forwards the configuration to the wrapped expression when it is
 * {@link Configurable}; otherwise does nothing.
 */
@Override
public void setConf(Configuration conf) {
    if (!(expression instanceof Configurable)) {
        return;
    }
    ((Configurable) expression).setConf(conf);
}
/**
 * Returns the configuration.
 *
 * @return the configuration obtained from {@code dn}
 */
public Configuration getConf() {
    return dn.getConf();
}
/**
 * Runs the Lanczos solve to the desired rank and serializes the resulting
 * eigenvectors to the given output path.
 *
 * @param originalConfig              base configuration for the job
 * @param state                       Lanczos state holding the corpus
 * @param desiredRank                 number of eigenpairs to compute
 * @param isSymmetric                 whether the corpus matrix is symmetric
 * @param outputEigenVectorPathString path to write the eigenvectors to
 * @return the (mutated) Lanczos state
 * @throws IOException if serialization of the output fails
 */
public LanczosState runJob(Configuration originalConfig, LanczosState state,
        int desiredRank, boolean isSymmetric, String outputEigenVectorPathString)
        throws IOException {
    // Give the corpus a defensive copy of the configuration.
    Configuration corpusConf = new Configuration(originalConfig);
    ((Configurable) state.getCorpus()).setConf(corpusConf);
    setConf(originalConfig);
    solve(state, desiredRank, isSymmetric);
    serializeOutput(state, new Path(outputEigenVectorPathString));
    return state;
}
public InputStream createDecompressionStream( InputStream downStream, Decompressor decompressor, int downStreamBufferSize) throws IOException { CompressionCodec codec = getCodec(conf); // Set the internal buffer size to read from down stream. if (downStreamBufferSize > 0) { ((Configurable)codec).getConf().setInt("io.file.buffer.size", downStreamBufferSize); } CompressionInputStream cis = codec.createInputStream(downStream, decompressor); BufferedInputStream bis2 = new BufferedInputStream(cis, DATA_IBUF_SIZE); return bis2; }
@Override public void configure(Configuration parameters) { // enforce sequential configure() calls synchronized (CONFIGURE_MUTEX) { // configure MR OutputFormat if necessary if (this.mapredOutputFormat instanceof Configurable) { ((Configurable) this.mapredOutputFormat).setConf(this.jobConf); } else if (this.mapredOutputFormat instanceof JobConfigurable) { ((JobConfigurable) this.mapredOutputFormat).configure(this.jobConf); } } }