/**
 * Computes the job's HDFS input splits and assigns them to cluster members.
 * Runs once on the coordinating side; the resulting assignment map is stored
 * in {@code assigned} for later per-member lookup.
 *
 * @param context Jet processor context supplying cluster and parallelism info
 * @throws Exception if the InputFormat fails to compute splits (I/O errors etc.)
 */
@Override public void init(@Nonnull Context context) throws Exception {
    logger = context.jetInstance().getHazelcastInstance().getLoggingService().getLogger(ReadHdfsP.class);
    // Hint the InputFormat with the cluster-wide parallelism so it produces
    // roughly one split per parallel processor.
    int totalParallelism = context.totalParallelism();
    InputFormat inputFormat = jobConf.getInputFormat();
    InputSplit[] splits = inputFormat.getSplits(jobConf, totalParallelism);
    // Wrap each split with its index so the original ordering survives the
    // assignment step below.
    IndexedInputSplit[] indexedInputSplits = new IndexedInputSplit[splits.length];
    Arrays.setAll(indexedInputSplits, i -> new IndexedInputSplit(i, splits[i]));
    Address[] addrs = context.jetInstance().getCluster().getMembers()
            .stream().map(Member::getAddress).toArray(Address[]::new);
    // Distribute splits across member addresses (strategy lives in
    // assignSplitsToMembers) and log the final assignment for diagnostics.
    assigned = assignSplitsToMembers(indexedInputSplits, addrs);
    printAssignments(assigned);
}
/**
 * Returns {@code count} writer processors for this member. Each processor is
 * bound to its own synthesized Hadoop task attempt so the output paths of
 * parallel writers do not collide.
 *
 * @param count number of parallel processors requested on this member
 */
@Override @Nonnull
public List<Processor> get(int count) {
    return range(0, count).mapToObj(this::newWriterProcessor).collect(toList());
}

/** Builds one WriteHdfsP bound to a unique task attempt for local slot {@code index}. */
private Processor newWriterProcessor(int index) {
    try {
        String uuid = context.jetInstance().getCluster().getLocalMember().getUuid();
        // Member UUID + slot index makes the attempt id unique across the cluster.
        TaskAttemptID attemptId = new TaskAttemptID("jet-node-" + uuid,
                jobContext.getJobID().getId(), JOB_SETUP, index, 0);
        jobConf.set("mapred.task.id", attemptId.toString());
        jobConf.setInt("mapred.task.partition", index);
        TaskAttemptContextImpl attemptContext = new TaskAttemptContextImpl(jobConf, attemptId);
        @SuppressWarnings("unchecked")
        OutputFormat<K, V> format = jobConf.getOutputFormat();
        RecordWriter<K, V> writer = format.getRecordWriter(
                null, jobConf, uuid + '-' + valueOf(index), Reporter.NULL);
        return new WriteHdfsP<>(writer, attemptContext, outputCommitter, extractKeyFn, extractValueFn);
    } catch (IOException e) {
        // Preserve the cause; callers see Jet's unchecked exception type.
        throw new JetException(e);
    }
}
}
/**
 * Returns {@code count} writer processors for this member, one per local
 * parallel task.
 *
 * @param count number of parallel processors requested on this member
 */
@Override @Nonnull public List<Processor> get(int count) {
    return range(0, count).mapToObj(i -> {
        try {
            String uuid = context.jetInstance().getCluster().getLocalMember().getUuid();
            // Synthesize a per-processor task attempt id from the member UUID and
            // local index; presumably this keeps parallel writers' outputs from
            // colliding — TODO confirm against WriteHdfsP/OutputCommitter usage.
            TaskAttemptID taskAttemptID = new TaskAttemptID("jet-node-" + uuid,
                    jobContext.getJobID().getId(), JOB_SETUP, i, 0);
            jobConf.set("mapred.task.id", taskAttemptID.toString());
            jobConf.setInt("mapred.task.partition", i);
            TaskAttemptContextImpl taskAttemptContext = new TaskAttemptContextImpl(jobConf, taskAttemptID);
            @SuppressWarnings("unchecked")
            OutputFormat<K, V> outFormat = jobConf.getOutputFormat();
            // First argument (FileSystem) is passed as null; the name combines the
            // member UUID and local index.
            RecordWriter<K, V> recordWriter = outFormat.getRecordWriter(
                    null, jobConf, uuid + '-' + valueOf(i), Reporter.NULL);
            return new WriteHdfsP<>(
                    recordWriter, taskAttemptContext, outputCommitter, extractKeyFn, extractValueFn);
        } catch (IOException e) {
            // Wrap the checked exception, preserving the cause.
            throw new JetException(e);
        }
    }).collect(toList());
}
}
/**
 * Computes the job's HDFS input splits and assigns them to cluster members.
 * The assignment map is stored in {@code assigned} and logged for diagnostics.
 *
 * @param context Jet processor context supplying cluster and parallelism info
 */
@Override public void init(@Nonnull Context context) {
    logger = context.jetInstance().getHazelcastInstance().getLoggingService().getLogger(ReadHdfsP.class);
    try {
        // One address per cluster member; splits are dealt out over these below.
        Address[] memberAddresses = context.jetInstance().getCluster().getMembers()
                .stream().map(Member::getAddress).toArray(Address[]::new);
        // Hint the InputFormat with total parallelism so it yields roughly one
        // split per parallel processor.
        InputFormat inputFormat = jobConf.getInputFormat();
        InputSplit[] rawSplits = inputFormat.getSplits(jobConf, context.totalParallelism());
        // Pair every split with its position so ordering information is retained.
        IndexedInputSplit[] indexedSplits = new IndexedInputSplit[rawSplits.length];
        for (int i = 0; i < indexedSplits.length; i++) {
            indexedSplits[i] = new IndexedInputSplit(i, rawSplits[i]);
        }
        assigned = assignSplitsToMembers(indexedSplits, memberAddresses);
        printAssignments(assigned);
    } catch (IOException e) {
        // Convert the checked exception via the project's rethrow helper.
        throw rethrow(e);
    }
}
@Test public void test_cluster() { // When run("cluster"); // Then String actual = captureOut(); assertContains(actual, jet.getCluster().getLocalMember().getUuid()); assertContains(actual, "ACTIVE"); }