@Override
public void initialize(final InputSplit inputSplit, final TaskAttemptContext taskAttemptContext) throws IOException, InterruptedException {
    final Configuration configuration = taskAttemptContext.getConfiguration();
    final InputFormat<NullWritable, VertexWritable> inputFormat = ReflectionUtils.newInstance(
            configuration.getClass(Constants.GREMLIN_HADOOP_GRAPH_READER, InputFormat.class, InputFormat.class), configuration);
    // only deserialize the graph filter here when the wrapped InputFormat does not handle filtering itself
    if (!(inputFormat instanceof GraphFilterAware) && configuration.get(Constants.GREMLIN_HADOOP_GRAPH_FILTER, null) != null)
        this.graphFilter = VertexProgramHelper.deserialize(
                ConfUtil.makeApacheConfiguration(configuration), Constants.GREMLIN_HADOOP_GRAPH_FILTER);
    this.recordReader = inputFormat.createRecordReader(inputSplit, taskAttemptContext);
    this.recordReader.initialize(inputSplit, taskAttemptContext);
}
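For reference, a minimal sketch of the serialize/deserialize round trip behind the graph-filter handling above; the fresh BaseConfiguration and the empty GraphFilter are assumed purely for illustration.

final org.apache.commons.configuration.Configuration conf = new org.apache.commons.configuration.BaseConfiguration(); // assumes the Commons Configuration version bundled with this TinkerPop line
final GraphFilter graphFilter = new GraphFilter();   // empty filter, illustration only
VertexProgramHelper.serialize(graphFilter, conf, Constants.GREMLIN_HADOOP_GRAPH_FILTER);
final GraphFilter roundTripped = VertexProgramHelper.deserialize(conf, Constants.GREMLIN_HADOOP_GRAPH_FILTER);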
public Traversal<Vertex, Edge> get() {
    return VertexProgramHelper.reverse(this.localMessageScope.getIncidentTraversal().get().asAdmin());
}
public static void storeGraphFilter(final Configuration apacheConfiguration, final org.apache.hadoop.conf.Configuration hadoopConfiguration, final GraphFilter graphFilter) {
    if (graphFilter.hasFilter()) {
        VertexProgramHelper.serialize(graphFilter, apacheConfiguration, Constants.GREMLIN_HADOOP_GRAPH_FILTER);
        hadoopConfiguration.set(Constants.GREMLIN_HADOOP_GRAPH_FILTER,
                apacheConfiguration.getString(Constants.GREMLIN_HADOOP_GRAPH_FILTER));
    }
}
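A usage sketch for storeGraphFilter, assuming a filter that keeps only "person" vertices; hasFilter() makes the call a no-op for an empty filter.

final GraphFilter graphFilter = new GraphFilter();
graphFilter.setVertexFilter(__.<Vertex>hasLabel("person"));   // assumed filter, keeps only "person" vertices
final org.apache.commons.configuration.Configuration apacheConf = new org.apache.commons.configuration.BaseConfiguration();
final org.apache.hadoop.conf.Configuration hadoopConf = new org.apache.hadoop.conf.Configuration();
storeGraphFilter(apacheConf, hadoopConf, graphFilter);        // writes the serialized filter under Constants.GREMLIN_HADOOP_GRAPH_FILTER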
public static <M> JavaPairRDD<Object, VertexWritable> prepareFinalGraphRDD(
        final JavaPairRDD<Object, VertexWritable> graphRDD,
        final JavaPairRDD<Object, ViewIncomingPayload<M>> viewIncomingRDD,
        final Set<VertexComputeKey> vertexComputeKeys) {
    // the graphRDD and the viewRDD must have the same partitioner
    if (graphRDD.partitioner().isPresent())
        assert (graphRDD.partitioner().get().equals(viewIncomingRDD.partitioner().get()));
    final String[] vertexComputeKeysArray = VertexProgramHelper.vertexComputeKeysAsArray(vertexComputeKeys); // the compute keys as an array
    return graphRDD.leftOuterJoin(viewIncomingRDD)
            .mapValues(tuple -> {
                final StarGraph.StarVertex vertex = tuple._1().get();
                vertex.dropVertexProperties(vertexComputeKeysArray); // drop all existing compute keys
                // attach the final computed view to the cached graph
                final List<DetachedVertexProperty<Object>> view = tuple._2().isPresent() ? tuple._2().get().getView() : Collections.emptyList();
                for (final DetachedVertexProperty<Object> property : view) {
                    if (!VertexProgramHelper.isTransientVertexComputeKey(property.key(), vertexComputeKeys))
                        property.attach(Attachable.Method.create(vertex));
                }
                return tuple._1();
            });
}
final String[] vertexComputeKeysArray = VertexProgramHelper.vertexComputeKeysAsArray(workerVertexProgram.getVertexComputeKeys()); // the compute keys as an array
final SparkMessenger<M> messenger = new SparkMessenger<>();
/**
 * {@inheritDoc}
 */
@Override
public B configure(final Object... keyValues) {
    VertexProgramHelper.legalConfigurationKeyValueArray(keyValues);
    for (int i = 0; i < keyValues.length; i = i + 2) {
        this.configuration.setProperty((String) keyValues[i], keyValues[i + 1]);
    }
    return (B) this;
}
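A hypothetical call against this builder (the key names are invented); VertexProgramHelper.legalConfigurationKeyValueArray(...) rejects an odd-length array or a non-String key before any property is set.

// Example against a concrete builder (PageRankVertexProgram.build()); the keys are invented.
final VertexProgram.Builder builder = PageRankVertexProgram.build()
        .configure("my.program.alpha", 0.85d,
                   "my.program.iterations", 30);
// builder.configure("dangling.key");   // would throw IllegalArgumentException: odd key/value count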
public static String[] vertexComputeKeysAsArray(final Set<VertexComputeKey> vertexComputeKeySet) {
    return VertexProgramHelper.vertexComputeKeysAsSet(vertexComputeKeySet).toArray(new String[vertexComputeKeySet.size()]);
}
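A small sketch of producing the input for this helper via VertexComputeKey.of(key, isTransient); the key names are invented (uses java.util.Arrays, java.util.HashSet, java.util.Set).

final Set<VertexComputeKey> keys = new HashSet<>(Arrays.asList(
        VertexComputeKey.of("my.rank", false),      // invented key that persists after the computation
        VertexComputeKey.of("my.scratch", true)));  // invented transient key, discarded when the job ends
final String[] asArray = VertexProgramHelper.vertexComputeKeysAsArray(keys);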
@Override
public void initialize(final InputSplit genericSplit, final TaskAttemptContext context) throws IOException {
    this.lineRecordReader.initialize(genericSplit, context);
    final Configuration configuration = context.getConfiguration();
    if (configuration.get(Constants.GREMLIN_HADOOP_GRAPH_FILTER, null) != null)
        this.graphFilter = VertexProgramHelper.deserialize(ConfUtil.makeApacheConfiguration(configuration), Constants.GREMLIN_HADOOP_GRAPH_FILTER);
    this.engine = manager.getEngineByName(configuration.get(SCRIPT_ENGINE, "gremlin-groovy"));
    final FileSystem fs = FileSystem.get(configuration);
    try (final InputStream stream = fs.open(new Path(configuration.get(SCRIPT_FILE)));
         final InputStreamReader reader = new InputStreamReader(stream)) {
        final String parse = String.join("\n", IOUtils.toString(reader), READ_CALL);
        script = ((Compilable) engine).compile(parse);
    } catch (final ScriptException e) {
        throw new IOException(e.getMessage(), e); // preserve the cause instead of discarding it
    }
}
final Direction direction = TinkerMessenger.getDirection(incidentTraversal);
final Edge[] edge = new Edge[1]; // simulates storage side-effects available in Gremlin, but not Java 8 streams
multiIterator.addIterator(StreamSupport.stream(
        Spliterators.spliteratorUnknownSize(
                VertexProgramHelper.reverse(incidentTraversal.asAdmin()),
                Spliterator.IMMUTABLE | Spliterator.SIZED), false)
        .map((Edge e) -> { edge[0] = e;
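The one-element Edge[] above works around Java's restriction that lambdas may only capture effectively-final locals; a standalone sketch of the same pattern (uses java.util.stream.IntStream):

final int[] lastSeen = new int[1];                 // mutable slot the lambda can write into
final int sum = IntStream.range(0, 5)
        .map(i -> { lastSeen[0] = i; return i * 2; })
        .sum();
// after the terminal sum(): sum == 20 and lastSeen[0] == 4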
public void storeState(final Configuration configuration, final String configurationKey) {
    try {
        VertexProgramHelper.serialize(this, configuration, configurationKey);
    } catch (final IllegalArgumentException e) {
        // the traversal cannot be serialized (probably because of lambdas); fall back to a direct object reference
        configuration.setProperty(configurationKey, this);
    }
}
public static <S, E> PureTraversal<S, E> loadState(final Configuration configuration, final String configurationKey, final Graph graph) {
    final Object configValue = configuration.getProperty(configurationKey);
    // a String value means the traversal was serialized; anything else is the direct reference stored by storeState(...)
    final PureTraversal<S, E> pureTraversal = (configValue instanceof String
            ? (PureTraversal<S, E>) VertexProgramHelper.deserialize(configuration, configurationKey)
            : (PureTraversal<S, E>) configValue);
    pureTraversal.pureTraversal.setGraph(graph);
    return pureTraversal;
}
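Putting storeState and loadState together; the configuration key, the GraphTraversalSource g, and the Graph graph are assumptions for illustration.

final org.apache.commons.configuration.Configuration conf = new org.apache.commons.configuration.BaseConfiguration();
final PureTraversal<Vertex, Long> pure = new PureTraversal<>(g.V().count().asAdmin()); // g is an assumed GraphTraversalSource
pure.storeState(conf, "my.traversal");             // assumed key; serialized, or stored by reference on failure
final PureTraversal<Vertex, Long> restored = PureTraversal.loadState(conf, "my.traversal", graph);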
public static <R> void storeHaltedTraversers(final Configuration configuration, final TraverserSet<R> haltedTraversers) {
    if (null != haltedTraversers && !haltedTraversers.isEmpty()) {
        try {
            VertexProgramHelper.serialize(haltedTraversers, configuration, HALTED_TRAVERSERS);
        } catch (final Exception e) {
            // the traversers may hold non-serializable state; fall back to a direct object reference
            configuration.setProperty(HALTED_TRAVERSERS, haltedTraversers);
        }
    }
}
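A round-trip sketch, assuming the companion TraversalVertexProgram.loadHaltedTraversers(Configuration) reader in the same class and an already-populated haltedTraversers set:

final org.apache.commons.configuration.Configuration conf = new org.apache.commons.configuration.BaseConfiguration();
TraversalVertexProgram.storeHaltedTraversers(conf, haltedTraversers); // haltedTraversers: assumed TraverserSet from a finished computation
final TraverserSet<Object> reloaded = TraversalVertexProgram.loadHaltedTraversers(conf);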
@Override
public void initialize(final InputSplit genericSplit, final TaskAttemptContext context) throws IOException {
    final FileSplit split = (FileSplit) genericSplit;
    final Configuration configuration = context.getConfiguration();
    if (configuration.get(Constants.GREMLIN_HADOOP_GRAPH_FILTER, null) != null)
        this.graphFilter = VertexProgramHelper.deserialize(ConfUtil.makeApacheConfiguration(configuration), Constants.GREMLIN_HADOOP_GRAPH_FILTER);
    this.gryoReader = GryoReader.build().mapper(
            GryoMapper.build().addRegistries(IoRegistryHelper.createRegistries(ConfUtil.makeApacheConfiguration(configuration))).create()).create();
    long start = split.getStart();
    final Path file = split.getPath();
    if (null != new CompressionCodecFactory(configuration).getCodec(file)) {
        throw new IllegalStateException("Compression is not supported for the (binary) Gryo format");
    }
    // open the file and seek to the start of the split
    this.inputStream = file.getFileSystem(configuration).open(split.getPath());
    this.splitLength = split.getLength();
    if (this.splitLength > 0)
        this.splitLength -= (seekToHeader(this.inputStream, start) - start);
}