/**
 * Notifies every registered MapReduce job that a MAP-stage worker iteration is starting.
 *
 * @param graph   the graph the worker operates on (unused here; required by the interface)
 * @param config  the job configuration (unused here; required by the interface)
 * @param metrics scan metrics collector (unused here; required by the interface)
 */
@Override
public void workerIterationStart(TitanGraph graph, Configuration config, ScanMetrics metrics) {
    // Only the keys (the MapReduce jobs) are needed; iterate the key view
    // directly instead of walking entrySet() and discarding the values.
    for (MapReduce mapJob : mapJobs.keySet()) {
        mapJob.workerStart(MapReduce.Stage.MAP);
    }
}
/**
 * Notifies every registered MapReduce job that a MAP-stage worker iteration is starting.
 *
 * @param graph   the graph the worker operates on (unused here; required by the interface)
 * @param config  the job configuration (unused here; required by the interface)
 * @param metrics scan metrics collector (unused here; required by the interface)
 */
@Override
public void workerIterationStart(JanusGraph graph, Configuration config, ScanMetrics metrics) {
    // Only the keys (the MapReduce jobs) are needed; iterate the key view
    // directly instead of walking entrySet() and discarding the values.
    for (MapReduce mapJob : mapJobs.keySet()) {
        mapJob.workerStart(MapReduce.Stage.MAP);
    }
}
/**
 * Builds a vertex-map job over the given graph, signals the MAP stage to every
 * registered MapReduce job, and wraps the job in an {@code Executor}.
 *
 * @param graph        the graph whose ID manager drives vertex iteration
 * @param vertexMemory shared per-vertex memory for the computation
 * @param mapJobs      MapReduce jobs keyed to their map-stage emitters
 * @return an {@code Executor} ready to run the vertex-map job
 */
public static Executor getVertexMapJob(StandardTitanGraph graph, FulgoraVertexMemory vertexMemory, Map<MapReduce, FulgoraMapEmitter> mapJobs) {
    VertexMapJob job = new VertexMapJob(graph.getIDManager(), vertexMemory, mapJobs);
    // Only the keys (the MapReduce jobs) are needed here; iterate the key view
    // directly instead of walking entrySet() and discarding the emitters.
    for (MapReduce mapJob : mapJobs.keySet()) {
        mapJob.workerStart(MapReduce.Stage.MAP);
    }
    return new Executor(graph, job);
}
/**
 * Builds a vertex-map job over the given graph, signals the MAP stage to every
 * registered MapReduce job, and wraps the job in an {@code Executor}.
 *
 * @param graph        the graph whose ID manager drives vertex iteration
 * @param vertexMemory shared per-vertex memory for the computation
 * @param mapJobs      MapReduce jobs keyed to their map-stage emitters
 * @return an {@code Executor} ready to run the vertex-map job
 */
public static Executor getVertexMapJob(StandardJanusGraph graph, FulgoraVertexMemory vertexMemory, Map<MapReduce, FulgoraMapEmitter> mapJobs) {
    VertexMapJob job = new VertexMapJob(graph.getIDManager(), vertexMemory, mapJobs);
    // Only the keys (the MapReduce jobs) are needed here; iterate the key view
    // directly instead of walking entrySet() and discarding the emitters.
    for (MapReduce mapJob : mapJobs.keySet()) {
        mapJob.workerStart(MapReduce.Stage.MAP);
    }
    return new Executor(graph, job);
}
/**
 * Wraps the input iterator for the given MapReduce job and signals that the
 * MAP stage is beginning on this worker.
 *
 * @param mapReduce     the MapReduce job being executed
 * @param inputIterator the (id, vertex) tuples to feed to the map phase
 */
public MapIterator(final MapReduce<K, V, ?, ?, ?> mapReduce, final Iterator<Tuple2<Object, VertexWritable>> inputIterator) {
    this.mapReduce = mapReduce;
    this.inputIterator = inputIterator;
    // Worker lifecycle hook: must fire before any map() calls are made.
    mapReduce.workerStart(MapReduce.Stage.MAP);
}
/**
 * Wraps the input iterator for the given MapReduce job and signals that the
 * REDUCE stage is beginning on this worker.
 *
 * @param mapReduce     the MapReduce job being executed
 * @param inputIterator the (key, grouped values) tuples to feed to the reduce phase
 */
public ReduceIterator(final MapReduce<K, V, OK, OV, ?> mapReduce, final Iterator<Tuple2<K, Iterable<V>>> inputIterator) {
    this.mapReduce = mapReduce;
    this.inputIterator = inputIterator;
    // Worker lifecycle hook: must fire before any reduce() calls are made.
    mapReduce.workerStart(MapReduce.Stage.REDUCE);
}
/**
 * Wraps the input iterator for the given MapReduce job and signals that the
 * COMBINE stage is beginning on this worker.
 *
 * @param mapReduce     the MapReduce job being executed
 * @param inputIterator the (key, value) tuples to feed to the combine phase
 */
public CombineIterator(final MapReduce<K, V, OK, OV, ?> mapReduce, final Iterator<Tuple2<K, V>> inputIterator) {
    this.mapReduce = mapReduce;
    this.inputIterator = inputIterator;
    // Worker lifecycle hook: must fire before any combine() calls are made.
    mapReduce.workerStart(MapReduce.Stage.COMBINE);
}
final FulgoraReduceEmitter<?, ?> reduceEmitter = new FulgoraReduceEmitter<>(); try (WorkerPool workers = new WorkerPool(numThreads)) { workers.submit(() -> mapReduce.workerStart(MapReduce.Stage.REDUCE)); for (final Map.Entry queueEntry : mapEmitter.reduceMap.entrySet()) { workers.submit(() -> mapReduce.reduce(queueEntry.getKey(), ((Iterable) queueEntry.getValue()).iterator(), reduceEmitter));
final FulgoraReduceEmitter<?, ?> reduceEmitter = new FulgoraReduceEmitter<>(); try (WorkerPool workers = new WorkerPool(numThreads)) { workers.submit(() -> mapReduce.workerStart(MapReduce.Stage.REDUCE)); for (final Map.Entry queueEntry : mapEmitter.reduceMap.entrySet()) { if (null == queueEntry) break;
/**
 * Hadoop mapper setup: reconstructs the MapReduce job from the task
 * configuration and notifies it that the MAP stage is starting.
 *
 * @param context the Hadoop mapper task context carrying the job configuration
 */
@Override
public void setup(final Mapper<NullWritable, VertexWritable, ObjectWritable, ObjectWritable>.Context context) {
    // Convert the Hadoop configuration into an Apache Commons one.
    final Configuration jobConfiguration = ConfUtil.makeApacheConfiguration(context.getConfiguration());
    // The Kryo shim must see the configuration before any (de)serialization happens.
    KryoShimServiceLoader.applyConfiguration(jobConfiguration);
    final HadoopGraph graph = HadoopGraph.open(jobConfiguration);
    this.mapReduce = MapReduce.createMapReduce(graph, jobConfiguration);
    this.mapReduce.workerStart(MapReduce.Stage.MAP);
}
/**
 * Hadoop reducer setup: reconstructs the MapReduce job from the task
 * configuration and notifies it that the REDUCE stage is starting.
 *
 * @param context the Hadoop reducer task context carrying the job configuration
 */
@Override
public void setup(final Reducer<ObjectWritable, ObjectWritable, ObjectWritable, ObjectWritable>.Context context) {
    // Convert the Hadoop configuration into an Apache Commons one.
    final Configuration jobConfiguration = ConfUtil.makeApacheConfiguration(context.getConfiguration());
    // The Kryo shim must see the configuration before any (de)serialization happens.
    KryoShimServiceLoader.applyConfiguration(jobConfiguration);
    final HadoopGraph graph = HadoopGraph.open(jobConfiguration);
    this.mapReduce = MapReduce.createMapReduce(graph, jobConfiguration);
    this.mapReduce.workerStart(MapReduce.Stage.REDUCE);
}
/**
 * Hadoop combiner setup: reconstructs the MapReduce job from the task
 * configuration and notifies it that the COMBINE stage is starting.
 *
 * @param context the Hadoop reducer task context carrying the job configuration
 */
@Override
public void setup(final Reducer<ObjectWritable, ObjectWritable, ObjectWritable, ObjectWritable>.Context context) {
    // Convert the Hadoop configuration into an Apache Commons one.
    final Configuration jobConfiguration = ConfUtil.makeApacheConfiguration(context.getConfiguration());
    // The Kryo shim must see the configuration before any (de)serialization happens.
    KryoShimServiceLoader.applyConfiguration(jobConfiguration);
    final HadoopGraph graph = HadoopGraph.open(jobConfiguration);
    this.mapReduce = MapReduce.createMapReduce(graph, jobConfiguration);
    this.mapReduce.workerStart(MapReduce.Stage.COMBINE);
}
/**
 * Wraps the input iterator for the given MapReduce job and signals that the
 * MAP stage is beginning on this worker.
 *
 * @param mapReduce     the MapReduce job being executed
 * @param inputIterator the (id, vertex) tuples to feed to the map phase
 */
public MapIterator(final MapReduce<K, V, ?, ?, ?> mapReduce, final Iterator<Tuple2<Object, VertexWritable>> inputIterator) {
    this.mapReduce = mapReduce;
    this.inputIterator = inputIterator;
    // Worker lifecycle hook: must fire before any map() calls are made.
    mapReduce.workerStart(MapReduce.Stage.MAP);
}
/**
 * Wraps the input iterator for the given MapReduce job and signals that the
 * REDUCE stage is beginning on this worker.
 *
 * @param mapReduce     the MapReduce job being executed
 * @param inputIterator the (key, grouped values) tuples to feed to the reduce phase
 */
public ReduceIterator(final MapReduce<K, V, OK, OV, ?> mapReduce, final Iterator<Tuple2<K, Iterable<V>>> inputIterator) {
    this.mapReduce = mapReduce;
    this.inputIterator = inputIterator;
    // Worker lifecycle hook: must fire before any reduce() calls are made.
    mapReduce.workerStart(MapReduce.Stage.REDUCE);
}
workers.setMapReduce(mapReduce); workers.executeMapReduce(workerMapReduce -> { workerMapReduce.workerStart(MapReduce.Stage.MAP); while (true) { if (Thread.interrupted()) throw new TraversalInterruptedException(); final SynchronizedIterator<Map.Entry<?, Queue<?>>> keyValues = new SynchronizedIterator((Iterator) mapEmitter.reduceMap.entrySet().iterator()); workers.executeMapReduce(workerMapReduce -> { workerMapReduce.workerStart(MapReduce.Stage.REDUCE); while (true) { if (Thread.interrupted()) throw new TraversalInterruptedException();
/**
 * Notifies every registered MapReduce job that a MAP-stage worker iteration is starting.
 *
 * @param graph   the graph the worker operates on (unused here; required by the interface)
 * @param config  the job configuration (unused here; required by the interface)
 * @param metrics scan metrics collector (unused here; required by the interface)
 */
@Override
public void workerIterationStart(TitanGraph graph, Configuration config, ScanMetrics metrics) {
    // Only the keys (the MapReduce jobs) are needed; iterate the key view
    // directly instead of walking entrySet() and discarding the values.
    for (MapReduce mapJob : mapJobs.keySet()) {
        mapJob.workerStart(MapReduce.Stage.MAP);
    }
}
/**
 * Builds a vertex-map job over the given graph, signals the MAP stage to every
 * registered MapReduce job, and wraps the job in an {@code Executor}.
 *
 * @param graph        the graph whose ID manager drives vertex iteration
 * @param vertexMemory shared per-vertex memory for the computation
 * @param mapJobs      MapReduce jobs keyed to their map-stage emitters
 * @return an {@code Executor} ready to run the vertex-map job
 */
public static Executor getVertexMapJob(StandardTitanGraph graph, FulgoraVertexMemory vertexMemory, Map<MapReduce, FulgoraMapEmitter> mapJobs) {
    VertexMapJob job = new VertexMapJob(graph.getIDManager(), vertexMemory, mapJobs);
    // Only the keys (the MapReduce jobs) are needed here; iterate the key view
    // directly instead of walking entrySet() and discarding the emitters.
    for (MapReduce mapJob : mapJobs.keySet()) {
        mapJob.workerStart(MapReduce.Stage.MAP);
    }
    return new Executor(graph, job);
}
/**
 * Hadoop mapper setup: reconstructs the MapReduce job from the task
 * configuration and notifies it that the MAP stage is starting.
 *
 * @param context the Hadoop mapper task context carrying the job configuration
 */
@Override
public void setup(final Mapper<NullWritable, VertexWritable, ObjectWritable, ObjectWritable>.Context context) {
    // Convert the Hadoop configuration into an Apache Commons one.
    final Configuration jobConfiguration = ConfUtil.makeApacheConfiguration(context.getConfiguration());
    // The Kryo shim must see the configuration before any (de)serialization happens.
    KryoShimServiceLoader.applyConfiguration(jobConfiguration);
    final HadoopGraph graph = HadoopGraph.open(jobConfiguration);
    this.mapReduce = MapReduce.createMapReduce(graph, jobConfiguration);
    this.mapReduce.workerStart(MapReduce.Stage.MAP);
}
/**
 * Hadoop reducer setup: reconstructs the MapReduce job from the task
 * configuration and notifies it that the REDUCE stage is starting.
 *
 * @param context the Hadoop reducer task context carrying the job configuration
 */
@Override
public void setup(final Reducer<ObjectWritable, ObjectWritable, ObjectWritable, ObjectWritable>.Context context) {
    // Convert the Hadoop configuration into an Apache Commons one.
    final Configuration jobConfiguration = ConfUtil.makeApacheConfiguration(context.getConfiguration());
    // The Kryo shim must see the configuration before any (de)serialization happens.
    KryoShimServiceLoader.applyConfiguration(jobConfiguration);
    final HadoopGraph graph = HadoopGraph.open(jobConfiguration);
    this.mapReduce = MapReduce.createMapReduce(graph, jobConfiguration);
    this.mapReduce.workerStart(MapReduce.Stage.REDUCE);
}
/**
 * Hadoop combiner setup: reconstructs the MapReduce job from the task
 * configuration and notifies it that the COMBINE stage is starting.
 *
 * @param context the Hadoop reducer task context carrying the job configuration
 */
@Override
public void setup(final Reducer<ObjectWritable, ObjectWritable, ObjectWritable, ObjectWritable>.Context context) {
    // Convert the Hadoop configuration into an Apache Commons one.
    final Configuration jobConfiguration = ConfUtil.makeApacheConfiguration(context.getConfiguration());
    // The Kryo shim must see the configuration before any (de)serialization happens.
    KryoShimServiceLoader.applyConfiguration(jobConfiguration);
    final HadoopGraph graph = HadoopGraph.open(jobConfiguration);
    this.mapReduce = MapReduce.createMapReduce(graph, jobConfiguration);
    this.mapReduce.workerStart(MapReduce.Stage.COMBINE);
}