/**
 * Binds each input path to its own child {@link FetchOperator} segment, then
 * primes every segment with its first row so merged iteration can begin.
 *
 * @param paths one input path per segment; segment {@code i} reads {@code paths.get(i)}
 * @throws HiveException if a segment fails to set up or fetch its first row
 */
public void setupContext(List<Path> paths) throws HiveException {
  int segmentCount = paths.size();
  // Publish the partition directories on the job conf before any segment reads.
  FetchOperator.setFetchOperatorContext(jobConf, fetchWork.getPartDir());
  FetchOperator[] segmentOps = segmentsForSize(segmentCount);
  for (int pos = 0; pos < segmentCount; pos++) {
    if (segmentOps[pos] == null) {
      // Lazily create a segment; each gets its own JobConf copy.
      segmentOps[pos] = new FetchOperator(fetchWork, new JobConf(jobConf));
    }
    segmentOps[pos].setupContext(Arrays.asList(paths.get(pos)));
  }
  initialize(segmentCount);
  // Pre-load the first available row from every segment.
  for (int pos = 0; pos < segmentCount; pos++) {
    if (nextHive(pos)) {
      put(pos);
    }
  }
  counter = 0;
}
/**
 * Assigns one input path to each child {@link FetchOperator} segment and
 * pre-fetches the first row from every segment.
 *
 * @param paths the per-segment input paths, indexed in segment order
 * @throws HiveException on segment setup or initial fetch failure
 */
public void setupContext(List<Path> paths) throws HiveException {
  final int n = paths.size();
  // Make the partition directories visible through the job conf first.
  FetchOperator.setFetchOperatorContext(jobConf, fetchWork.getPartDir());
  final FetchOperator[] ops = segmentsForSize(n);
  int idx = 0;
  for (Path p : paths) {
    if (ops[idx] == null) {
      // Created on demand, each with an independent JobConf clone.
      ops[idx] = new FetchOperator(fetchWork, new JobConf(jobConf));
    }
    ops[idx].setupContext(Arrays.asList(p));
    idx++;
  }
  initialize(n);
  // Seed the merge state with the first row of every segment that has one.
  for (int i = 0; i < n; i++) {
    if (nextHive(i)) {
      put(i);
    }
  }
  counter = 0;
}
/**
 * Points {@code fetchOp} at the small-table bucket files that correspond to
 * the big table's current input file, using the configured bucket matcher.
 *
 * @param fetchOp the fetch operator to (re)configure
 * @param alias the small-table alias whose bucket files are wanted
 * @param currentInputFile the big table file currently being processed
 * @throws Exception if the matcher cannot be instantiated or matching fails
 */
private void setUpFetchOpContext(FetchOperator fetchOp, String alias, String currentInputFile) throws Exception {
  BucketMapJoinContext ctx = this.work.getBucketMapjoinContext();
  // Matcher class is configurable; instantiate reflectively with no conf.
  BucketMatcher matcher = ReflectionUtils.newInstance(ctx.getBucketMatcherClass(), null);
  matcher.setAliasBucketFileNameMapping(ctx.getAliasBucketFileNameMapping());
  List<Path> bucketFiles =
      matcher.getAliasBucketFiles(currentInputFile, ctx.getMapJoinBigTableAlias(), alias);
  fetchOp.setupContext(bucketFiles);
}
/**
 * Configures {@code fetchOp} with the bucket files of {@code alias} that match
 * the current big-table input file.
 *
 * @param fetchOp operator whose read context is replaced
 * @param alias small-table alias to match buckets for
 * @param currentInputFile big-table input file driving the match
 * @throws Exception on matcher instantiation or lookup failure
 */
private void setUpFetchOpContext(FetchOperator fetchOp, String alias, String currentInputFile) throws Exception {
  BucketMapJoinContext joinCtx = this.work.getBucketMapjoinContext();
  Class<? extends BucketMatcher> matcherClass = joinCtx.getBucketMatcherClass();
  // Reflective instantiation; a null Configuration is acceptable here.
  BucketMatcher matcher = ReflectionUtils.newInstance(matcherClass, null);
  matcher.setAliasBucketFileNameMapping(joinCtx.getAliasBucketFileNameMapping());
  String bigTableAlias = joinCtx.getMapJoinBigTableAlias();
  List<Path> matched = matcher.getAliasBucketFiles(currentInputFile, bigTableAlias, alias);
  fetchOp.setupContext(matched);
}
/**
 * Points {@code fetchOp} at the small-table bucket files matching the current
 * big-table input file, via the iterator-based {@code setupContext} overload.
 *
 * @param fetchOp the fetch operator to configure
 * @param alias the small-table alias whose bucket files are resolved
 * @param currentInputFile the big-table file currently being processed
 * @throws Exception if the matcher cannot be created or matching fails
 */
private void setUpFetchOpContext(FetchOperator fetchOp, String alias, String currentInputFile) throws Exception {
  BucketMapJoinContext bucketMatcherCxt = this.work.getBucketMapjoinContext();
  Class<? extends BucketMatcher> bucketMatcherCls = bucketMatcherCxt.getBucketMatcherClass();
  // ReflectionUtils.newInstance is generic over its Class argument, so the
  // explicit (BucketMatcher) cast used previously is redundant; dropping it
  // matches the sibling overloads of this method.
  BucketMatcher bucketMatcher = ReflectionUtils.newInstance(bucketMatcherCls, null);
  bucketMatcher.setAliasBucketFileNameMapping(bucketMatcherCxt.getAliasBucketFileNameMapping());
  List<Path> aliasFiles = bucketMatcher.getAliasBucketFiles(currentInputFile,
      bucketMatcherCxt.getMapJoinBigTableAlias(), alias);
  Iterator<Path> iter = aliasFiles.iterator();
  // Second argument (a path-to-partition hint) is intentionally null here.
  fetchOp.setupContext(iter, null);
}
/**
 * Wires one {@link FetchOperator} segment per input path and loads the first
 * row from each segment before iteration starts.
 *
 * @param paths input paths, one per segment, in segment index order
 * @throws HiveException if segment setup or the initial fetch fails
 */
public void setupContext(List<Path> paths) throws HiveException {
  int total = paths.size();
  // Expose partition directories via the job configuration up front.
  FetchOperator.setFetchOperatorContext(jobConf, fetchWork.getPartDir());
  FetchOperator[] fetchers = segmentsForSize(total);
  int slot = 0;
  while (slot < total) {
    Path target = paths.get(slot);
    if (fetchers[slot] == null) {
      // Each segment owns a private copy of the job configuration.
      fetchers[slot] = new FetchOperator(fetchWork, new JobConf(jobConf));
    }
    fetchers[slot].setupContext(Arrays.asList(target));
    slot++;
  }
  initialize(total);
  // Prime every segment that yields a row.
  int probe = 0;
  while (probe < total) {
    if (nextHive(probe)) {
      put(probe);
    }
    probe++;
  }
  counter = 0;
}
/**
 * Re-targets {@code fetchOp} to the bucket files of {@code alias} that
 * correspond to the big table's current input file.
 *
 * @param fetchOp fetch operator to configure
 * @param alias alias of the small table being matched
 * @param currentInputFile current big-table input file
 * @throws Exception when matcher creation or bucket lookup fails
 */
private void setUpFetchOpContext(FetchOperator fetchOp, String alias, String currentInputFile) throws Exception {
  final BucketMapJoinContext context = this.work.getBucketMapjoinContext();
  // Build the pluggable matcher reflectively (no Configuration needed).
  final BucketMatcher matcher =
      ReflectionUtils.newInstance(context.getBucketMatcherClass(), null);
  matcher.setAliasBucketFileNameMapping(context.getAliasBucketFileNameMapping());
  fetchOp.setupContext(
      matcher.getAliasBucketFiles(currentInputFile, context.getMapJoinBigTableAlias(), alias));
}
/**
 * Configures {@code fetchOp} with the bucket files of {@code alias} that match
 * the map task's current input file, read from the job configuration.
 *
 * @param fetchOp fetch operator to configure
 * @param alias small-table alias whose buckets are resolved
 * @throws Exception if the matcher cannot be instantiated or matching fails
 */
private void setUpFetchOpContext(FetchOperator fetchOp, String alias) throws Exception {
  // The current split's file name is published by MapReduce on the job conf.
  String inputFile = HiveConf.getVar(jc, HiveConf.ConfVars.HADOOPMAPFILENAME);
  BucketMapJoinContext joinCtx = this.localWork.getBucketMapjoinContext();
  BucketMatcher matcher =
      (BucketMatcher) ReflectionUtils.newInstance(joinCtx.getBucketMatcherClass(), null);
  matcher.setAliasBucketFileNameMapping(joinCtx.getAliasBucketFileNameMapping());
  List<Path> buckets =
      matcher.getAliasBucketFiles(inputFile, joinCtx.getMapJoinBigTableAlias(), alias);
  // Hand the matched files to the operator via the iterator-based overload.
  fetchOp.setupContext(buckets.iterator(), null);
}
// Resolves the small-table bucket files matching the current big-table input
// file and points fetchOp at them. Also records a file id on the exec context,
// looked up from the bucket file name mapping for the current input file
// (logged below); presumably consumed by downstream operators — TODO confirm.
// NOTE(review): unlike the sibling overloads, this variant declares no checked
// exception and mutates the exec context before configuring the matcher, so
// the statement order is deliberate — left byte-identical.
private void setUpFetchOpContext(FetchOperator fetchOp, String alias) {
  String currentInputFile = this.getExecContext().getCurrentInputFile();
  BucketMapJoinContext bucketMatcherCxt = this.localWork
      .getBucketMapjoinContext();
  Class<? extends BucketMatcher> bucketMatcherCls = bucketMatcherCxt
      .getBucketMatcherClass();
  BucketMatcher bucketMatcher = (BucketMatcher) ReflectionUtils.newInstance(
      bucketMatcherCls, null);
  // Side effect: stamp the exec context with the id of the current bucket file.
  this.getExecContext().setFileId(bucketMatcherCxt.getBucketFileNameMapping().get(currentInputFile));
  LOG.info("set task id: " + this.getExecContext().getFileId());
  bucketMatcher.setAliasBucketFileNameMapping(bucketMatcherCxt
      .getAliasBucketFileNameMapping());
  List<Path> aliasFiles = bucketMatcher.getAliasBucketFiles(currentInputFile,
      bucketMatcherCxt.getMapJoinBigTableAlias(), alias);
  Iterator<Path> iter = aliasFiles.iterator();
  fetchOp.setupContext(iter, null);
}