@SuppressWarnings({"unchecked", "RedundantCast"}) private static Class<? extends InputFormat<?, ?>> getInputFormatClass(JobConf conf, String inputFormatName) throws ClassNotFoundException { // CDH uses different names for Parquet if ("parquet.hive.DeprecatedParquetInputFormat".equals(inputFormatName) || "parquet.hive.MapredParquetInputFormat".equals(inputFormatName)) { return MapredParquetInputFormat.class; } Class<?> clazz = conf.getClassByName(inputFormatName); return (Class<? extends InputFormat<?, ?>>) clazz.asSubclass(InputFormat.class); }
/**
 * Retrieves a map of {@link Path}s to the {@link AvroMapper} class that
 * should be used for them.
 *
 * @param conf The configuration of the job
 * @see #addInputPath(JobConf, Path, Class, Schema)
 * @return A map of paths-to-mappers for the job
 * @throws RuntimeException wrapping a {@link ClassNotFoundException} if a
 *         configured mapper class cannot be loaded
 * @throws IllegalStateException if a mapping entry is not of the form
 *         {@code path;mapperClass}
 */
@SuppressWarnings("unchecked")
static Map<Path, Class<? extends AvroMapper>> getMapperTypeMap(JobConf conf) {
    // Read the property once instead of twice (null check + split in the original).
    String mappings = conf.get(mappersKey);
    if (mappings == null) {
        return Collections.emptyMap();
    }
    Map<Path, Class<? extends AvroMapper>> m = new HashMap<>();
    for (String pathMapping : mappings.split(",")) {
        String[] split = pathMapping.split(";");
        if (split.length < 2) {
            // Fail with a diagnosable message instead of an opaque ArrayIndexOutOfBoundsException.
            throw new IllegalStateException("Malformed path-to-mapper entry: " + pathMapping);
        }
        Class<? extends AvroMapper> mapClass;
        try {
            mapClass = (Class<? extends AvroMapper>) conf.getClassByName(split[1]);
        } catch (ClassNotFoundException e) {
            throw new RuntimeException(e);
        }
        m.put(new Path(split[0]), mapClass);
    }
    return m;
}
public static String getFileExtension(JobConf conf, StorageFormat storageFormat) { // text format files must have the correct extension when compressed if (!HiveConf.getBoolVar(conf, COMPRESSRESULT) || !HiveIgnoreKeyTextOutputFormat.class.getName().equals(storageFormat.getOutputFormat())) { return ""; } String compressionCodecClass = conf.get("mapred.output.compression.codec"); if (compressionCodecClass == null) { return new DefaultCodec().getDefaultExtension(); } try { Class<? extends CompressionCodec> codecClass = conf.getClassByName(compressionCodecClass).asSubclass(CompressionCodec.class); return ReflectionUtil.newInstance(codecClass, conf).getDefaultExtension(); } catch (ClassNotFoundException e) { throw new PrestoException(HIVE_UNSUPPORTED_FORMAT, "Compression codec not found: " + compressionCodecClass, e); } catch (RuntimeException e) { throw new PrestoException(HIVE_UNSUPPORTED_FORMAT, "Failed to load compression codec: " + compressionCodecClass, e); } }
/**
 * Creates a {@link BucketizedHiveRecordReader} for the given split, resolving and
 * initializing the underlying input format named by the split.
 *
 * @param split expected to be a {@link BucketizedHiveInputSplit}
 * @param job the job configuration used to resolve the input format class
 * @param reporter progress reporter passed through to the underlying reader
 * @throws IOException if the split's input format class cannot be loaded
 */
@Override
public RecordReader getRecordReader(InputSplit split, JobConf job, Reporter reporter) throws IOException {
    BucketizedHiveInputSplit hsplit = (BucketizedHiveInputSplit) split;

    String inputFormatClassName = null;
    Class inputFormatClass = null;
    try {
        inputFormatClassName = hsplit.inputFormatClassName();
        inputFormatClass = job.getClassByName(inputFormatClassName);
    } catch (Exception e) {
        // Chain the cause so classloading failures remain diagnosable.
        throw new IOException("cannot find class " + inputFormatClassName, e);
    }

    pushProjectionsAndFilters(job, inputFormatClass, hsplit.getPath());

    InputFormat inputFormat = getInputFormatFromCache(inputFormatClass, job);
    BucketizedHiveRecordReader<K, V> rr = new BucketizedHiveRecordReader(inputFormat, hsplit, job, reporter);
    rr.initIOContext(hsplit, job, inputFormatClass);
    return rr;
}
/**
 * Creates a {@link BucketizedHiveRecordReader} for the given split, resolving and
 * initializing the underlying input format named by the split.
 *
 * @param split expected to be a {@link BucketizedHiveInputSplit}
 * @param job the job configuration used to resolve the input format class
 * @param reporter progress reporter passed through to the underlying reader
 * @throws IOException if the split's input format class cannot be loaded
 */
@Override
public RecordReader getRecordReader(InputSplit split, JobConf job, Reporter reporter) throws IOException {
    BucketizedHiveInputSplit hsplit = (BucketizedHiveInputSplit) split;

    String inputFormatClassName = null;
    Class inputFormatClass = null;
    try {
        inputFormatClassName = hsplit.inputFormatClassName();
        inputFormatClass = job.getClassByName(inputFormatClassName);
    } catch (Exception e) {
        // Chain the cause so classloading failures remain diagnosable.
        throw new IOException("cannot find class " + inputFormatClassName, e);
    }

    pushProjectionsAndFilters(job, inputFormatClass, hsplit.getPath());

    InputFormat inputFormat = getInputFormatFromCache(inputFormatClass, job);
    BucketizedHiveRecordReader<K, V> rr = new BucketizedHiveRecordReader(inputFormat, hsplit, job, reporter);
    rr.initIOContext(hsplit, job, inputFormatClass);
    return rr;
}
/**
 * Create a generic Hive RecordReader that can iterate over all chunks in a
 * CombinedFileSplit.
 *
 * @param split delegated to the superclass unless it is a {@link CombineHiveInputSplit}
 * @param job the job configuration used to resolve the input format class
 * @param reporter progress reporter passed through to the underlying reader
 * @throws IOException if the split's input format class cannot be loaded
 */
@Override
public RecordReader getRecordReader(InputSplit split, JobConf job, Reporter reporter) throws IOException {
    if (!(split instanceof CombineHiveInputSplit)) {
        return super.getRecordReader(split, job, reporter);
    }

    CombineHiveInputSplit hsplit = (CombineHiveInputSplit) split;

    String inputFormatClassName = null;
    Class inputFormatClass = null;
    try {
        inputFormatClassName = hsplit.inputFormatClassName();
        inputFormatClass = job.getClassByName(inputFormatClassName);
    } catch (Exception e) {
        // Chain the cause so classloading failures remain diagnosable.
        throw new IOException("cannot find class " + inputFormatClassName, e);
    }

    pushProjectionsAndFilters(job, inputFormatClass, hsplit.getPath(0));

    return ShimLoader.getHadoopShims().getCombineFileInputFormat()
            .getRecordReader(job, (CombineFileSplit) split, reporter, CombineHiveRecordReader.class);
}
// Resolve clsName through the job configuration's classloader; propagates
// ClassNotFoundException to the enclosing scope (fragment of a larger method).
Class<?> cls = jobConf().getClassByName(clsName);
/**
 * Create a generic Hive RecordReader that can iterate over all chunks in a
 * CombinedFileSplit.
 *
 * @param split delegated to the superclass unless it is a {@link CombineHiveInputSplit}
 * @param job the job configuration used to resolve the input format class
 * @param reporter progress reporter passed through to the underlying reader
 * @throws IOException if the split's input format class cannot be loaded
 */
@Override
public RecordReader getRecordReader(InputSplit split, JobConf job, Reporter reporter) throws IOException {
    if (!(split instanceof CombineHiveInputSplit)) {
        return super.getRecordReader(split, job, reporter);
    }

    CombineHiveInputSplit hsplit = (CombineHiveInputSplit) split;

    String inputFormatClassName = null;
    Class inputFormatClass = null;
    try {
        inputFormatClassName = hsplit.inputFormatClassName();
        inputFormatClass = job.getClassByName(inputFormatClassName);
    } catch (Exception e) {
        // Chain the cause so classloading failures remain diagnosable.
        throw new IOException("cannot find class " + inputFormatClassName, e);
    }

    pushProjectionsAndFilters(job, inputFormatClass, hsplit.getPath(0));

    return ShimLoader.getHadoopShims().getCombineFileInputFormat()
            .getRecordReader(job, (CombineFileSplit) split, reporter, CombineHiveRecordReader.class);
}
// Resolve the split's input format class; the IOException chains the original
// exception as its cause so classloading failures stay diagnosable.
// (Fragment: the enclosing method body continues outside this view.)
try { inputFormatClassName = hsplit.inputFormatClassName(); inputFormatClass = job.getClassByName(inputFormatClassName); } catch (Exception e) { throw new IOException("cannot find class " + inputFormatClassName, e);
// Resolve the split's input format class; the IOException chains the original
// exception as its cause so classloading failures stay diagnosable.
// (Fragment: the enclosing method body continues outside this view.)
try { inputFormatClassName = hsplit.inputFormatClassName(); inputFormatClass = job.getClassByName(inputFormatClassName); } catch (Exception e) { throw new IOException("cannot find class " + inputFormatClassName, e);
/**
 * Attempts to load the test class shipped in the auxiliary jar.
 *
 * @param jobConf configuration whose classloader performs the lookup
 * @return the loaded class, or {@code null} when the test jar is not on the classpath
 */
@SuppressWarnings("unchecked")
private Class loadLibJar(JobConf jobConf) {
    try {
        return jobConf.getClassByName("testjar.ClassWordCount");
    } catch (ClassNotFoundException ignored) {
        // Best-effort: absence of the jar is an expected condition in this test.
        return null;
    }
}
}
/**
 * Loads the class named by the given command-line option and verifies it is a
 * subtype of {@code cls}.
 *
 * @throws ClassNotFoundException if the named class cannot be loaded
 */
private static <InterfaceType> Class<? extends InterfaceType> getClass(
        CommandLine cl, String key, JobConf conf, Class<InterfaceType> cls)
        throws ClassNotFoundException {
    String className = cl.getOptionValue(key);
    return conf.getClassByName(className).asSubclass(cls);
}
/**
 * Resolves the command-line option {@code key} to a class and checks that the
 * result implements or extends {@code cls}.
 *
 * @throws ClassNotFoundException if the named class cannot be loaded
 */
private static <InterfaceType> Class<? extends InterfaceType> getClass(
        CommandLine cl, String key, JobConf conf, Class<InterfaceType> cls)
        throws ClassNotFoundException {
    Class<?> resolved = conf.getClassByName(cl.getOptionValue(key));
    return resolved.asSubclass(cls);
}
/**
 * Loads the class named by the given command-line option and verifies it is a
 * subtype of {@code cls}.
 *
 * @throws ClassNotFoundException if the named class cannot be loaded
 */
private static <InterfaceType> Class<? extends InterfaceType> getClass(
        CommandLine cl, String key, JobConf conf, Class<InterfaceType> cls)
        throws ClassNotFoundException {
    // getOptionValue already returns String; the original (String) cast was redundant.
    return conf.getClassByName(cl.getOptionValue(key)).asSubclass(cls);
}
/**
 * Looks up the class named by command-line option {@code key}, asserting it
 * conforms to the requested interface type.
 *
 * @throws ClassNotFoundException if the named class cannot be loaded
 */
private static <InterfaceType> Class<? extends InterfaceType> getClass(
        CommandLine cl, String key, JobConf conf, Class<InterfaceType> cls)
        throws ClassNotFoundException {
    String optionValue = cl.getOptionValue(key);
    Class<?> candidate = conf.getClassByName(optionValue);
    return candidate.asSubclass(cls);
}
/**
 * Loads and type-checks the class whose name is supplied via the command-line
 * option {@code key}.
 *
 * @throws ClassNotFoundException if the named class cannot be loaded
 */
private static <InterfaceType> Class<? extends InterfaceType> getClass(
        CommandLine cl, String key, JobConf conf, Class<InterfaceType> cls)
        throws ClassNotFoundException {
    return conf
            .getClassByName(cl.getOptionValue(key))
            .asSubclass(cls);
}
@SuppressWarnings({"unchecked", "RedundantCast"}) private static Class<? extends InputFormat<?, ?>> getInputFormatClass(JobConf conf, String inputFormatName) throws ClassNotFoundException { // CDH uses different names for Parquet if ("parquet.hive.DeprecatedParquetInputFormat".equals(inputFormatName) || "parquet.hive.MapredParquetInputFormat".equals(inputFormatName)) { return MapredParquetInputFormat.class; } Class<?> clazz = conf.getClassByName(inputFormatName); return (Class<? extends InputFormat<?, ?>>) clazz.asSubclass(InputFormat.class); }
/**
 * Tries to load {@code testjar.ClassWordCount} through the job's classloader.
 *
 * @param jobConf configuration whose classloader performs the lookup
 * @return the loaded class, or {@code null} if it is not on the classpath
 */
@SuppressWarnings("unchecked")
private Class loadLibJar(JobConf jobConf) {
    Class result;
    try {
        result = jobConf.getClassByName("testjar.ClassWordCount");
    } catch (ClassNotFoundException ignored) {
        // Deliberate best-effort lookup: missing jar yields null, not a failure.
        result = null;
    }
    return result;
}
}
/**
 * Resolves a class name taken from the command line into a {@code Class}
 * object constrained to the given interface type.
 *
 * @throws ClassNotFoundException if the named class cannot be loaded
 */
private static <InterfaceType> Class<? extends InterfaceType> getClass(
        CommandLine cl, String key, JobConf conf, Class<InterfaceType> cls)
        throws ClassNotFoundException {
    final String name = cl.getOptionValue(key);
    return conf.getClassByName(name).asSubclass(cls);
}
/**
 * Resolves the command-line value for {@code key} to a class constrained to
 * the given interface type.
 *
 * @throws ClassNotFoundException if the named class cannot be loaded
 */
private static <InterfaceType> Class<? extends InterfaceType> getClass(
        CommandLine cl, String key, JobConf conf, Class<InterfaceType> cls)
        throws ClassNotFoundException {
    // This CommandLine's getValue returns Object, so the String cast is required here.
    String className = (String) cl.getValue(key);
    return conf.getClassByName(className).asSubclass(cls);
}