@Override
public ReaderContext prepareRead() throws HCatException {
  try {
    Job job = new Job(conf);
    HCatInputFormat hcif = HCatInputFormat.setInput(
      job, re.getDbName(), re.getTableName(), re.getFilterString());
    ReaderContextImpl cntxt = new ReaderContextImpl();
    cntxt.setInputSplits(hcif.getSplits(
      ShimLoader.getHadoopShims().getHCatShim().createJobContext(job.getConfiguration(), null)));
    cntxt.setConf(job.getConfiguration());
    return cntxt;
  } catch (IOException e) {
    throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
  } catch (InterruptedException e) {
    throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
  }
}
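// A minimal usage sketch (not part of the method above), assuming the public
// org.apache.hive.hcatalog.data.transfer API; the database/table names, the
// config map, and slaveNumber are hypothetical placeholders.
ReadEntity entity = new ReadEntity.Builder()
  .withDatabase("default")
  .withTable("my_table")
  .build();
HCatReader master = DataTransferFactory.getHCatReader(entity, config);
ReaderContext readerContext = master.prepareRead(); // invokes the method above

// On a slave node, read the records of one input split:
HCatReader slave = DataTransferFactory.getHCatReader(readerContext, slaveNumber);
Iterator<HCatRecord> records = slave.read();
while (records.hasNext()) {
  HCatRecord record = records.next();
  // process record
}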
int readCount = 0;
for (int i = 0; i < splits.size(); i++) {
  TaskAttemptContext tac = ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(
    jonconf, new TaskAttemptID());
  RecordReader<LongWritable, BytesRefArrayWritable> rr =
    inputFormat.createRecordReader(splits.get(i), tac);
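  // Hedged completion of the fragment above, following the standard
  // mapreduce RecordReader lifecycle; only rr, tac, splits, i, and
  // readCount come from the original snippet.
  rr.initialize(splits.get(i), tac);
  while (rr.nextKeyValue()) {
    readCount++; // count every row the reader returns
  }
  rr.close();
}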
// Fragment: the opening of the enclosing call that builds currContext is not
// shown; a per-partition output context is configured and cached by path.
    context.getJobID(),
    InternalUtil.createReporter(HCatMapRedUtil.createTaskAttemptContext(
      jobConf, ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptID())));
HCatOutputFormat.configureOutputStorageHandler(currContext, jobInfo, fullPartSpec);
contextDiscoveredByPath.put(st.getPath().toString(), currContext);
// Apply group and permissions explicitly only when the partition path is NOT
// in HDFS; on HDFS they are already handled when the files are created.
if (!ShimLoader.getHadoopShims().getHCatShim().isFileInHDFS(fs, partPath)) {
  applyGroupAndPerms(fs, partPath, perms, grpName, true);
}
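// Hedged sketch of what a recursive applyGroupAndPerms could look like (the
// real helper's body is not shown here; setOwner's null keeps the owner):
private void applyGroupAndPerms(FileSystem fs, Path dir, FsPermission perms,
    String grpName, boolean recurse) throws IOException {
  fs.setOwner(dir, null, grpName);
  fs.setPermission(dir, perms);
  if (recurse) {
    for (FileStatus child : fs.listStatus(dir)) {
      if (child.isDirectory()) {
        applyGroupAndPerms(fs, child.getPath(), perms, grpName, true);
      } else {
        fs.setOwner(child.getPath(), null, grpName);
        fs.setPermission(child.getPath(), perms);
      }
    }
  }
}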
// Fragment: the left-hand side of this declaration (the jtTokenSelector
// variable used just below) is on a preceding line that is not shown.
    new org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenSelector();
Token jtToken = jtTokenSelector.selectToken(
  org.apache.hadoop.security.SecurityUtil.buildTokenService(
    ShimLoader.getHadoopShims().getHCatShim().getResourceManagerAddress(conf)),
  ugi.getTokens());
if (jtToken == null) {
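  // Hedged continuation sketch: with no ResourceManager delegation token in
  // the UGI, a common fallback is to fetch one and stash it in the job
  // credentials. The JobClient usage, renewer, and alias are assumptions.
  Token<?> fetched = new JobClient(new JobConf(conf))
    .getDelegationToken(new Text("renewer")); // hypothetical renewer
  credentials.addToken(new Text("jt-token"), fetched); // hypothetical alias
}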
setVarsInConf(id);
HCatOutputFormat outFormat = new HCatOutputFormat();
TaskAttemptContext cntxt = ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(
  conf, new TaskAttemptID(ShimLoader.getHadoopShims().getHCatShim().createTaskID(), id));
OutputCommitter committer = null;
RecordWriter<WritableComparable<?>, HCatRecord> writer;
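// Hedged continuation sketch: the usual next steps for a slave-side write per
// the standard OutputFormat contract (recordItr is assumed; exceptions elided).
committer = outFormat.getOutputCommitter(cntxt);
committer.setupTask(cntxt);
writer = outFormat.getRecordWriter(cntxt);
while (recordItr.hasNext()) {
  writer.write(null, recordItr.next()); // the HCat record writer ignores the key
}
writer.close(cntxt);
if (committer.needsTaskCommit(cntxt)) {
  committer.commitTask(cntxt);
}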
@Override
public WriterContext prepareWrite() throws HCatException {
  OutputJobInfo jobInfo = OutputJobInfo.create(
    we.getDbName(), we.getTableName(), we.getPartitionKVs());
  Job job;
  try {
    job = new Job(conf);
    HCatOutputFormat.setOutput(job, jobInfo);
    HCatOutputFormat.setSchema(job, HCatOutputFormat.getTableSchema(job.getConfiguration()));
    HCatOutputFormat outFormat = new HCatOutputFormat();
    outFormat.checkOutputSpecs(job);
    outFormat.getOutputCommitter(
      ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptContext(
        job.getConfiguration(),
        ShimLoader.getHadoopShims().getHCatShim().createTaskAttemptID()))
      .setupJob(job);
  } catch (IOException e) {
    throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
  } catch (InterruptedException e) {
    throw new HCatException(ErrorType.ERROR_NOT_INITIALIZED, e);
  }
  WriterContextImpl cntxt = new WriterContextImpl();
  cntxt.setConf(job.getConfiguration());
  return cntxt;
}
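// Mirror-image usage sketch for the write path (same hedges as the read
// sketch: names beyond the public data-transfer API are placeholders):
WriteEntity entity = new WriteEntity.Builder()
  .withDatabase("default")
  .withTable("my_table")
  .build();
HCatWriter master = DataTransferFactory.getHCatWriter(entity, config);
WriterContext writerContext = master.prepareWrite(); // invokes the method above

// On each slave, write an iterator of records, then commit from the master:
HCatWriter slave = DataTransferFactory.getHCatWriter(writerContext);
slave.write(recordItr);
master.commit(writerContext);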