@Override
public InputFormat<?, ?> getInputFormat() throws IOException {
  if (hcatInputFormat == null) {
    hcatInputFormat = new HCatInputFormat();
  }
  return hcatInputFormat;
}
@SuppressWarnings("unchecked") private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { this.fieldNames = new String[in.readInt()]; for (int i = 0; i < this.fieldNames.length; i++) { this.fieldNames[i] = in.readUTF(); } Configuration configuration = new Configuration(); configuration.readFields(in); if (this.configuration == null) { this.configuration = configuration; } this.hCatInputFormat = new org.apache.hive.hcatalog.mapreduce.HCatInputFormat(); this.outputSchema = (HCatSchema) HCatUtil.deserialize(this.configuration.get("mapreduce.lib.hcat.output.schema")); }
/**
 * Set inputs to use for the job. This queries the metastore with the given input
 * specification and serializes matching partitions into the job conf for use by MR tasks.
 *
 * @param conf the job configuration
 * @param dbName database name; if null, 'default' is used
 * @param tableName table name
 * @param filter the partition filter to use; can be null for no filter
 * @throws IOException on all errors
 */
public static HCatInputFormat setInput(
    Configuration conf, String dbName, String tableName, String filter) throws IOException {
  Preconditions.checkNotNull(conf, "required argument 'conf' is null");
  Preconditions.checkNotNull(tableName, "required argument 'tableName' is null");
  HCatInputFormat hCatInputFormat = new HCatInputFormat();
  hCatInputFormat.conf = conf;
  hCatInputFormat.inputJobInfo = InputJobInfo.create(dbName, tableName, filter, null);
  try {
    InitializeInput.setInput(conf, hCatInputFormat.inputJobInfo);
  } catch (Exception e) {
    throw new IOException(e);
  }
  return hCatInputFormat;
}
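A hypothetical driver-side call to setInput; the table name and partition filter below are illustrative, not from the original source:

Job job = Job.getInstance(new Configuration(), "hcat-read-example");
// Point the job at one partition of default.mytable via the metastore.
HCatInputFormat.setInput(job.getConfiguration(), "default", "mytable", "ds='2024-01-01'");
job.setInputFormatClass(HCatInputFormat.class);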
@Override
public Iterator<HCatRecord> read() throws HCatException {
  HCatInputFormat inpFmt = new HCatInputFormat();
  RecordReader<WritableComparable, HCatRecord> rr;
  try {
    TaskAttemptContext cntxt = ShimLoader.getHadoopShims().getHCatShim()
        .createTaskAttemptContext(conf, new TaskAttemptID());
    rr = inpFmt.createRecordReader(split, cntxt);
    rr.initialize(split, cntxt);
  } catch (IOException | InterruptedException e) {
    throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
  }
  return new HCatRecordItr(rr);
}
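A hypothetical sketch of consuming the iterator returned by read(); `reader` stands in for an already-initialized instance of this class, with its conf and split set:

Iterator<HCatRecord> records = reader.read();
while (records.hasNext()) {
  HCatRecord record = records.next();
  System.out.println(record.get(0)); // first column of the row
}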