public InputFormatReader(FileSystem fs, Path path, DatasetDescriptor descriptor) {
  this.fs = fs;
  this.path = path;
  this.descriptor = descriptor;
  this.state = ReaderWriterState.NEW;

  // set up the configuration from the descriptor properties
  this.conf = new Configuration(fs.getConf());
  for (String prop : descriptor.listProperties()) {
    conf.set(prop, descriptor.getProperty(prop));
  }

  this.attemptContext = Hadoop.TaskAttemptContext.ctor.newInstance(conf, FAKE_ID);
}
@Override
@SuppressWarnings("unchecked")
public <R> R invoke(Object target, Object... args) {
  Preconditions.checkArgument(target == null,
      "Invalid call to constructor: target must be null");
  // pass only args: forwarding target as well would prepend a spurious
  // null argument to the constructor call
  return (R) newInstance(args);
}
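// A minimal usage sketch (not from the source) of how a constructor resolved
// through this wrapper is typically called; the class name "org.example.Thing"
// is a placeholder assumption.
DynConstructors.Ctor<Object> ctor = new DynConstructors.Builder()
    .impl("org.example.Thing") // resolve a no-arg constructor by class name
    .build();
Object direct = ctor.newInstance();   // direct construction
Object viaInvoke = ctor.invoke(null); // invoke path: target must be null for a constructor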
try {
  // the builder setup and the exception message are abbreviated in the
  // original snippet; the Object element type and message text below are
  // assumptions made to complete the block
  DynConstructors.Ctor<Object> ctor = new DynConstructors.Builder()
      .impl(transform)
      .buildChecked();
  transformFn = ctor.newInstance();
} catch (NoSuchMethodException e) {
  throw new DatasetException(
      "Cannot find a no-arg constructor for transform: " + transform, e);
}
private static Configuration newHiveConf(Configuration base) {
  checkHiveDependencies(); // ensure HIVE_CONF is present
  Configuration conf = HIVE_CONF.newInstance(base, HIVE_CONF.getConstructedClass());
  // add everything in base back in to work around a bug in HiveConf
  HiveUtils.addResource(conf, base);
  return conf;
}
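// A hedged usage sketch (assumption, not from the source): a caller hands in
// an existing Configuration and gets back a HiveConf-backed copy with the
// base resources re-applied, avoiding the HiveConf merge bug noted above.
// The fs.defaultFS value is a placeholder.
Configuration base = new Configuration();
base.set("fs.defaultFS", "hdfs://example:8020");
Configuration hiveAware = newHiveConf(base);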
public static <K, V> FileInputFormat<K, V> newInputFormatInstance(
    DatasetDescriptor descriptor) {
  DynConstructors.Ctor<FileInputFormat<K, V>> ctor =
      new DynConstructors.Builder()
          .impl(descriptor.getProperty(INPUT_FORMAT_CLASS_PROP))
          .build();
  return ctor.newInstance();
}
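// A hedged usage sketch: the input format class comes from the descriptor
// property, so a caller wires it up roughly like this. The TextInputFormat
// choice and the schema literal are assumptions for illustration.
DatasetDescriptor descriptor = new DatasetDescriptor.Builder()
    .schemaLiteral("\"string\"")
    .property(INPUT_FORMAT_CLASS_PROP,
        "org.apache.hadoop.mapreduce.lib.input.TextInputFormat")
    .build();
FileInputFormat<LongWritable, Text> format = newInputFormatInstance(descriptor);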
@Test
public void testSetupJobIsIdempotent() {
  DatasetKeyOutputFormat.MergeOutputCommitter<Object> outputCommitter =
      new DatasetKeyOutputFormat.MergeOutputCommitter<Object>();
  Configuration conf = DefaultConfiguration.get();
  DatasetKeyOutputFormat.configure(conf).appendTo(outputDataset);

  JobID jobId = new JobID("jt", 42);
  JobContext context = Hadoop.JobContext.ctor.newInstance(conf, jobId);

  // setup the job
  outputCommitter.setupJob(context);

  // call setup again to simulate an ApplicationMaster restart
  outputCommitter.setupJob(context);
}