public static void setClassLoaderAndJar(JobConf conf, Class jobClass) {
    conf.setClassLoader(Thread.currentThread().getContextClassLoader());
    String jar = HadoopUtils.findContainingJar(jobClass, Thread.currentThread().getContextClassLoader());
    if (jar != null) {
        conf.setJar(jar);
    }
}
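// The helper above depends on HadoopUtils.findContainingJar to locate the jar a
// class was loaded from. A minimal sketch of the usual technique (scan the
// loader's resources for the class file and keep the first jar: URL), assuming
// the signature implied by the call site; the real HadoopUtils may differ.
import java.io.IOException;
import java.net.URL;
import java.net.URLDecoder;
import java.util.Enumeration;

public static String findContainingJar(Class<?> clazz, ClassLoader loader) throws IOException {
    String classFile = clazz.getName().replace('.', '/') + ".class";
    for (Enumeration<URL> urls = loader.getResources(classFile); urls.hasMoreElements();) {
        URL url = urls.nextElement();
        if ("jar".equals(url.getProtocol())) {
            // "jar:file:/path/to.jar!/com/Foo.class" -> "/path/to.jar"
            String path = url.getPath();
            if (path.startsWith("file:")) {
                path = path.substring("file:".length());
            }
            return URLDecoder.decode(path, "UTF-8").replaceAll("!.*$", "");
        }
    }
    return null;
}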
loader = Utilities.addToClassPath(loader, StringUtils.split(libjars, ","));
conf.setClassLoader(loader);
public static JobConf configureHDFSJobConf(Map<String, String> configuration) {
    JobConf conf = new JobConf();
    String localShortCircuitSocketPath = configuration.get(ExternalDataConstants.KEY_LOCAL_SOCKET_PATH);
    String formatClassName = HDFSUtils.getInputFormatClassName(configuration);
    conf.set(ExternalDataConstants.KEY_HADOOP_FILESYSTEM_URI,
            configuration.get(ExternalDataConstants.KEY_HDFS_URL).trim());
    conf.set(ExternalDataConstants.KEY_HADOOP_FILESYSTEM_CLASS, ExternalDataConstants.CLASS_NAME_HDFS_FILESYSTEM);
    conf.setClassLoader(HDFSInputStream.class.getClassLoader());
    conf.set(ExternalDataConstants.KEY_HADOOP_INPUT_DIR, configuration.get(ExternalDataConstants.KEY_PATH).trim());
    conf.set(ExternalDataConstants.KEY_HADOOP_INPUT_FORMAT, formatClassName);
    // Enable local short-circuit reads if the user supplied the socket path
    if (localShortCircuitSocketPath != null) {
        conf.set(ExternalDataConstants.KEY_HADOOP_SHORT_CIRCUIT, "true");
        conf.set(ExternalDataConstants.KEY_HADOOP_SOCKET_PATH, localShortCircuitSocketPath.trim());
    }
    return conf;
}
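// A hedged usage sketch for the factory above. The keys mirror the
// ExternalDataConstants lookups inside the method; the URL and path values are
// placeholders, and the map must also carry whatever input-format key
// HDFSUtils.getInputFormatClassName expects.
Map<String, String> configuration = new HashMap<String, String>();
configuration.put(ExternalDataConstants.KEY_HDFS_URL, "hdfs://localhost:8020"); // placeholder
configuration.put(ExternalDataConstants.KEY_PATH, "/data/input");               // placeholder
JobConf conf = configureHDFSJobConf(configuration);
// The configured JobConf can now hand out splits through its input format.
InputSplit[] splits = conf.getInputFormat().getSplits(conf, 1);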
private JobConf getConf(JobConf job) {
    JobConf conf = new JobConf(job);
    FileInputFormat.setInputPaths(conf, indir);
    conf.setClassLoader(job.getClassLoader());
    return conf;
}
private void configureClasspath(JobConf conf) throws IOException {
    // get the task and the current classloader, which will become the parent
    Task task = ReduceTask.this;
    ClassLoader parent = conf.getClassLoader();

    // get the work directory, which holds the elements we are dynamically
    // adding to the classpath
    File workDir = new File(task.getJobFile()).getParentFile();
    ArrayList<URL> urllist = new ArrayList<URL>();

    // add the jars and directories to the classpath
    String jar = conf.getJar();
    if (jar != null) {
        File jobCacheDir = new File(new Path(jar).getParent().toString());
        File[] libs = new File(jobCacheDir, "lib").listFiles();
        if (libs != null) {
            for (int i = 0; i < libs.length; i++) {
                // toURI().toURL() handles spaces and special characters,
                // unlike the deprecated File.toURL()
                urllist.add(libs[i].toURI().toURL());
            }
        }
        urllist.add(new File(jobCacheDir, "classes").toURI().toURL());
        urllist.add(jobCacheDir.toURI().toURL());
    }
    urllist.add(workDir.toURI().toURL());

    // create a new classloader with the old classloader as its parent,
    // then set that classloader as the one used by the current jobconf
    URL[] urls = urllist.toArray(new URL[urllist.size()]);
    URLClassLoader loader = new URLClassLoader(urls, parent);
    conf.setClassLoader(loader);
}
private static JobConf configureJobConf() throws Exception {
    JobConf conf = new JobConf();
    String hdfsUrl = "hdfs://127.0.0.1:31888";
    String hdfsPath = "/asterix/extrasmalltweets.txt";
    conf.set("fs.default.name", hdfsUrl);
    conf.set("fs.hdfs.impl", DistributedFileSystem.class.getName());
    conf.setClassLoader(GenericAdapter.class.getClassLoader());
    conf.set("mapred.input.dir", hdfsPath);
    conf.set("mapred.input.format.class", TextInputFormat.class.getName());
    return conf;
}
@SuppressWarnings("unchecked")
@Override
public void configure(Configuration config) {
    this.jobConf = HadoopUtil.asJobConfInstance(FlinkConfigConverter.toHadoopConfig(config));
    // Set the correct class loader. Not necessary for Flink versions >= 0.10,
    // but we set it anyway to be on the safe side.
    jobConf.setClassLoader(this.getClass().getClassLoader());
    this.mapredInputFormat = jobConf.getInputFormat();
    if (this.mapredInputFormat instanceof JobConfigurable) {
        ((JobConfigurable) this.mapredInputFormat).configure(jobConf);
    }
}
/**
 * Tests that the class loader set by {@link JobConf#setClassLoader(ClassLoader)}
 * is inherited by any {@link WrappedRecordReader}s created by
 * {@link CompositeRecordReader}.
 */
public void testClassLoader() throws Exception {
    JobConf job = new JobConf();
    Fake_ClassLoader classLoader = new Fake_ClassLoader();
    job.setClassLoader(classLoader);
    assertTrue(job.getClassLoader() instanceof Fake_ClassLoader);

    FileSystem fs = FileSystem.get(job);
    Path testdir = new Path(System.getProperty("test.build.data", "/tmp")).makeQualified(fs);
    Path base = new Path(testdir, "/empty");
    Path[] src = { new Path(base, "i0"), new Path("i1"), new Path("i2") };
    job.set("mapreduce.join.expr",
            CompositeInputFormat.compose("outer", IF_ClassLoaderChecker.class, src));

    CompositeInputFormat<NullWritable> inputFormat = new CompositeInputFormat<NullWritable>();
    inputFormat.getRecordReader(inputFormat.getSplits(job, 1)[0], job, Reporter.NULL);
}
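// The test references Fake_ClassLoader (and IF_ClassLoaderChecker) without
// showing them. Since the assertion only checks the loader's type, a plausible
// minimal stand-in for the former is a no-op subclass; the checker input
// format, which presumably verifies the inherited loader inside its record
// reader, is omitted here.
public static class Fake_ClassLoader extends ClassLoader {
    // Intentionally empty: it delegates all loading to its parent; only its
    // type matters to the instanceof assertion above.
}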
conf.setClassLoader(classLoader);
makeClassLoader(conf, new File(workDirName.toString()));
Thread.currentThread().setContextClassLoader(classLoader);
conf.setClassLoader(classLoader);
conf.setJar(jar.getURI().toString());
loader = ExecutionUtils.createParentLastClassLoader(jar, beanClassLoader, cfg);
conf.setClassLoader(loader);
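// ExecutionUtils.createParentLastClassLoader is not shown here. A minimal
// sketch of the parent-last (child-first) idea it presumably implements: try
// the job jar's URLs first and fall back to the parent only on a miss. The
// class name below is illustrative, not the real ExecutionUtils internals.
import java.net.URL;
import java.net.URLClassLoader;

class ParentLastClassLoader extends URLClassLoader {
    ParentLastClassLoader(URL[] urls, ClassLoader parent) {
        super(urls, parent);
    }

    @Override
    protected Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException {
        synchronized (getClassLoadingLock(name)) {
            Class<?> c = findLoadedClass(name);
            if (c == null) {
                try {
                    c = findClass(name); // child (job jar) first
                } catch (ClassNotFoundException e) {
                    c = super.loadClass(name, resolve); // parent as fallback
                }
            }
            if (resolve) {
                resolveClass(c);
            }
            return c;
        }
    }
}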
Thread.currentThread().setContextClassLoader(ctx.getJobletContext().getClassLoader());
JobConf conf = confFactory.getConf();
conf.setClassLoader(ctx.getJobletContext().getClassLoader());
IKeyValueParser parser = tupleParserFactory.createKeyValueParser(ctx);
try {