public Builder<T, K, V> setConfiguration(@Nullable Configuration configuration) {
  if (configuration == null) {
    configuration = new Configuration(false);
  }
  return this.setSerializableConfiguration(new SerializableConfiguration(configuration));
}

public abstract Builder<T, K, V> setUsername(String username);
public Configuration getConfiguration() {
  return serializableConfiguration.get();
}
@Test
public void testCreateNewConfiguration() {
  Configuration confFromNull = SerializableConfiguration.newConfiguration(null);
  assertNotNull(confFromNull);
  Configuration conf =
      SerializableConfiguration.newConfiguration(new SerializableConfiguration(confFromNull));
  assertNotNull(conf);
}
@Test
public void testConstruction() {
  assertNotNull(DEFAULT_SERIALIZABLE_CONF);
  assertNotNull(DEFAULT_SERIALIZABLE_CONF.get());
  thrown.expect(NullPointerException.class);
  new SerializableConfiguration(null);
}
@Override
public Long run() throws Exception {
  long size = 0;
  Job job = SerializableConfiguration.newJob(serializableConfiguration());
  for (FileStatus st : listStatus(createFormat(job), job)) {
    size += st.getLen();
  }
  return size;
}
});
@Override
public Void run() throws Exception {
  FileSystem fs =
      FileSystem.get(
          new URI(path()),
          SerializableConfiguration.newConfiguration(serializableConfiguration()));
  checkState(!fs.exists(new Path(path())), "Output path %s already exists", path());
  return null;
}
});
@Test
public void testSerializationDeserialization() {
  Configuration conf = new Configuration();
  conf.set("hadoop.silly.test", "test-value");
  byte[] object = SerializationUtils.serialize(new SerializableConfiguration(conf));
  SerializableConfiguration serConf = SerializationUtils.deserialize(object);
  assertNotNull(serConf);
  assertEquals("test-value", serConf.get().get("hadoop.silly.test"));
}
private Job newJob() throws IOException {
  Job job = SerializableConfiguration.newJob(serializableConfiguration());
  job.setJobID(jobId);
  job.setOutputKeyClass(keyClass());
  job.setOutputValueClass(valueClass());
  return job;
}
@Override
public Void run() throws Exception {
  FileSystem fs =
      FileSystem.get(
          new URI(filepattern()),
          SerializableConfiguration.newConfiguration(serializableConfiguration()));
  FileStatus[] fileStatuses = fs.globStatus(new Path(filepattern()));
  checkState(
      fileStatuses != null && fileStatuses.length > 0,
      "Unable to find any files matching %s",
      filepattern());
  return null;
}
});
@Test
public void testValidateConfigurationWithDBInputFormat() {
  Configuration conf = new Configuration();
  conf.setClass("key.class", LongWritable.class, Object.class);
  conf.setClass("value.class", Text.class, Object.class);
  conf.setClass("mapreduce.job.inputformat.class", DBInputFormat.class, InputFormat.class);

  thrown.expect(IllegalArgumentException.class);
  HadoopInputFormatIO.<String, String>read()
      .withConfiguration(new SerializableConfiguration(conf).get())
      .withKeyTranslation(myKeyTranslate)
      .withValueTranslation(myValueTranslate);
}
public Builder<T, K, V> setConfiguration(@Nullable Configuration configuration) {
  if (configuration == null) {
    configuration = new Configuration(false);
  }
  return this.setSerializableConfiguration(new SerializableConfiguration(configuration));
}

public abstract Builder<T, K, V> setSerializableSplit(SerializableSplit serializableSplit);
HDFSFileReader(
    HDFSFileSource<T, K, V> source,
    String filepattern,
    Class<? extends FileInputFormat<K, V>> formatClass,
    SerializableSplit serializableSplit)
    throws IOException {
  this.source = source;
  this.filepattern = filepattern;
  this.formatClass = formatClass;
  this.job = SerializableConfiguration.newJob(source.serializableConfiguration());

  if (serializableSplit != null) {
    this.splits = ImmutableList.of(serializableSplit.getSplit());
    this.splitsIterator = splits.listIterator();
  }
}
/** Returns a new populated {@link Configuration} object. */
public static Configuration newConfiguration(@Nullable SerializableConfiguration conf) {
  if (conf == null) {
    return new Configuration();
  } else {
    return conf.get();
  }
}
}
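// Minimal usage sketch of the helper above (illustrative, not from this repo;
// the config key and value are assumptions): wrap a Configuration so it can
// cross a Java-serialization boundary, then recover a plain Configuration via
// newConfiguration(). A null argument yields a fresh default Configuration.
static Configuration roundTripExample() {
  Configuration original = new Configuration(false);
  original.set("hadoop.example.key", "example-value"); // assumed key/value
  SerializableConfiguration wrapped = new SerializableConfiguration(original);
  // Returns the wrapped Configuration with the same entries intact.
  return SerializableConfiguration.newConfiguration(wrapped);
}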
@Test
public void testValidateConfigurationWithDBInputFormat() {
  Configuration conf = new Configuration();
  conf.setClass("key.class", LongWritable.class, Object.class);
  conf.setClass("value.class", Text.class, Object.class);
  conf.setClass("mapreduce.job.inputformat.class", DBInputFormat.class, InputFormat.class);

  thrown.expect(IllegalArgumentException.class);
  HadoopFormatIO.<String, String>read()
      .withConfiguration(new SerializableConfiguration(conf).get())
      .withKeyTranslation(myKeyTranslate)
      .withValueTranslation(myValueTranslate);
}
/** Reads from the HBase instance indicated by the given configuration. */
public ReadAll withConfiguration(Configuration configuration) {
  checkArgument(configuration != null, "configuration can not be null");
  return new ReadAll(new SerializableConfiguration(configuration));
}
/** Returns a new configured {@link Job} object. */
public static Job newJob(@Nullable SerializableConfiguration conf) throws IOException {
  if (conf == null) {
    return Job.getInstance();
  } else {
    // Don't read the configuration on the worker thread; copy the entries
    // that were captured on the master thread instead.
    Job job = Job.getInstance(new Configuration(false));
    for (Map.Entry<String, String> entry : conf.get()) {
      job.getConfiguration().set(entry.getKey(), entry.getValue());
    }
    return job;
  }
}
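// Minimal worker-side sketch of newJob() (illustrative; the method name is an
// assumption, not from this repo): rebuild a Job from the Configuration that
// was captured on the driver, without touching the worker's Hadoop defaults.
static Job workerSideJob(SerializableConfiguration serConf) throws IOException {
  Job job = SerializableConfiguration.newJob(serConf);
  // job.getConfiguration() now holds a copy of every entry captured on the
  // driver; a null serConf would instead return Job.getInstance() defaults.
  return job;
}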
/** Writes to the HBase instance indicated by the given Configuration. */
public Write withConfiguration(Configuration configuration) {
  checkArgument(configuration != null, "configuration can not be null");
  return new Write(new SerializableConfiguration(configuration), tableId);
}
private List<InputSplit> computeSplits(
    long desiredBundleSizeBytes, SerializableConfiguration serializableConfiguration)
    throws IOException, IllegalAccessException, InstantiationException {
  Job job = SerializableConfiguration.newJob(serializableConfiguration);
  FileInputFormat.setMinInputSplitSize(job, desiredBundleSizeBytes);
  FileInputFormat.setMaxInputSplitSize(job, desiredBundleSizeBytes);
  return createFormat(job).getSplits(job);
}