@Override public void write(NullWritable key, VertexWritable value) throws IOException, InterruptedException { // TODO tolerate possibility that concurrent OLTP activity has deleted the vertex? maybe configurable... Object vertexID = value.get().id(); Vertex vertex = tx.vertices(vertexID).next(); Iterator<VertexProperty<Object>> vpIter = value.get().properties(); while (vpIter.hasNext()) { VertexProperty<Object> vp = vpIter.next(); if (!persistableKeys.isEmpty() && !persistableKeys.contains(vp.key())) { log.debug("[vid {}] skipping key {}", vertexID, vp.key()); continue; } vertex.property(vp.key(), vp.value()); log.debug("[vid {}] set {}={}", vertexID, vp.key(), vp.value()); } }
/**
 * Loads the graph as an RDD of (vertex id, VertexWritable) pairs, using the Hadoop
 * InputFormat configured under {@code Constants.GREMLIN_HADOOP_GRAPH_READER}.
 *
 * @param configuration the TinkerPop configuration to translate into a Hadoop one
 * @param sparkContext  the Spark context used to create the RDD
 * @return pair RDD keyed by vertex id
 */
@Override
public JavaPairRDD<Object, VertexWritable> readGraphRDD(final Configuration configuration, final JavaSparkContext sparkContext) {
    final org.apache.hadoop.conf.Configuration hadoopConfiguration = ConfUtil.makeHadoopConfiguration(configuration);
    // The configured reader class is only known at runtime; the cast is unavoidable here.
    @SuppressWarnings("unchecked")
    final Class<InputFormat<NullWritable, VertexWritable>> inputFormatClass =
            (Class<InputFormat<NullWritable, VertexWritable>>) hadoopConfiguration.getClass(Constants.GREMLIN_HADOOP_GRAPH_READER, InputFormat.class);
    return sparkContext
            .newAPIHadoopRDD(hadoopConfiguration, inputFormatClass, NullWritable.class, VertexWritable.class)
            .mapToPair(tuple -> new Tuple2<>(tuple._2().get().id(), new VertexWritable(tuple._2().get())));
}
/**
 * Advances to the next key/value pair. Without a graph filter this simply delegates to
 * the wrapped reader; with a filter it skips vertices the filter rejects, rewriting the
 * current value in place with the filtered star vertex.
 *
 * @return {@code true} if a (filter-surviving) vertex is available, {@code false} at end of input
 */
@Override
public boolean nextKeyValue() throws IOException, InterruptedException {
    if (null == this.graphFilter) {
        return this.recordReader.nextKeyValue();
    }
    // Keep reading until a vertex survives the filter or the underlying reader is exhausted.
    while (this.recordReader.nextKeyValue()) {
        final VertexWritable vertexWritable = this.recordReader.getCurrentValue();
        final Optional<StarGraph.StarVertex> filtered = vertexWritable.get().applyGraphFilter(this.graphFilter);
        if (filtered.isPresent()) {
            vertexWritable.set(filtered.get());
            return true;
        }
    }
    return false;
}
@Override public boolean nextKeyValue() throws IOException, InterruptedException { while (reader.nextKeyValue()) { // TODO janusgraph05 integration -- the duplicate() call may be unnecessary final TinkerVertex maybeNullTinkerVertex = deserializer.readHadoopVertex(reader.getCurrentKey(), reader.getCurrentValue()); if (null != maybeNullTinkerVertex) { vertex = new VertexWritable(maybeNullTinkerVertex); if (graphFilter == null) { return true; } else { final Optional<StarGraph.StarVertex> vertexWritable = vertex.get().applyGraphFilter(graphFilter); if (vertexWritable.isPresent()) { vertex.set(vertexWritable.get()); return true; } } } } return false; }
@Override public boolean nextKeyValue() throws IOException, InterruptedException { while (reader.nextKeyValue()) { // TODO titan05 integration -- the duplicate() call may be unnecessary final TinkerVertex maybeNullTinkerVertex = deser.readHadoopVertex(reader.getCurrentKey(), reader.getCurrentValue()); if (null != maybeNullTinkerVertex) { vertex = new VertexWritable(maybeNullTinkerVertex); //vertexQuery.filterRelationsOf(vertex); // TODO reimplement vertexquery filtering return true; } } return false; }
// Convenience constructor: wraps the given vertex by delegating to set(Vertex).
public VertexWritable(final Vertex vertex) { this.set(vertex); }
// Java-serialization hook: delegates to the Hadoop Writable readFields(...) method so
// both serialization mechanisms share a single wire format.
private void readObject(final ObjectInputStream inputStream) throws IOException, ClassNotFoundException { this.readFields(inputStream); }
// Java-serialization hook: delegates to the Hadoop Writable write(...) method so
// both serialization mechanisms share a single wire format.
private void writeObject(final ObjectOutputStream outputStream) throws IOException { this.write(outputStream); }
@Override public boolean nextKeyValue() throws IOException, InterruptedException { while (reader.nextKeyValue()) { // TODO janusgraph05 integration -- the duplicate() call may be unnecessary final TinkerVertex maybeNullTinkerVertex = deserializer.readHadoopVertex(reader.getCurrentKey(), reader.getCurrentValue()); if (null != maybeNullTinkerVertex) { vertex = new VertexWritable(maybeNullTinkerVertex); if (graphFilter == null) { return true; } else { final Optional<StarGraph.StarVertex> vertexWritable = vertex.get().applyGraphFilter(graphFilter); if (vertexWritable.isPresent()) { vertex.set(vertexWritable.get()); return true; } } } } return false; }
// Kryo deserialization: reads a serialized StarGraph and wraps its star vertex in a
// new VertexWritable. (Trailing brace closes the enclosing serializer class.)
@Override public <I extends InputShim> VertexWritable read(final KryoShim<I, ?> kryo, final I input, final Class<VertexWritable> clazz) { return new VertexWritable(kryo.readObject(input, StarGraph.class).getStarVertex()); } }
// NOTE(review): fragment of a larger read loop (starts mid-method, ends with a dangling
// else) -- reads one vertex via GryoReader with the graph filter applied and stores it
// in vertexWritable when the filter accepts it; the else branch continues elsewhere.
final Optional<Vertex> vertex = this.gryoReader.readVertex(in, this.graphFilter); if (vertex.isPresent()) { this.vertexWritable.set(vertex.get()); return true; } else {
// Java-serialization hook: delegates to the Hadoop Writable readFields(...) method so
// both serialization mechanisms share a single wire format.
private void readObject(final ObjectInputStream inputStream) throws IOException, ClassNotFoundException { this.readFields(inputStream); }
// Java-serialization hook: delegates to the Hadoop Writable write(...) method so
// both serialization mechanisms share a single wire format.
private void writeObject(final ObjectOutputStream outputStream) throws IOException { this.write(outputStream); }
/**
 * Loads the graph as an RDD of (vertex id, VertexWritable) pairs, using the Hadoop
 * InputFormat configured under {@code Constants.GREMLIN_HADOOP_GRAPH_READER}.
 *
 * @param configuration the TinkerPop configuration to translate into a Hadoop one
 * @param sparkContext  the Spark context used to create the RDD
 * @return pair RDD keyed by vertex id
 */
@Override
public JavaPairRDD<Object, VertexWritable> readGraphRDD(final Configuration configuration, final JavaSparkContext sparkContext) {
    final org.apache.hadoop.conf.Configuration hadoopConfiguration = ConfUtil.makeHadoopConfiguration(configuration);
    // The configured reader class is only known at runtime; the cast is unavoidable here.
    @SuppressWarnings("unchecked")
    final Class<InputFormat<NullWritable, VertexWritable>> inputFormatClass =
            (Class<InputFormat<NullWritable, VertexWritable>>) hadoopConfiguration.getClass(Constants.GREMLIN_HADOOP_GRAPH_READER, InputFormat.class);
    return sparkContext
            .newAPIHadoopRDD(hadoopConfiguration, inputFormatClass, NullWritable.class, VertexWritable.class)
            .mapToPair(tuple -> new Tuple2<>(tuple._2().get().id(), new VertexWritable(tuple._2().get())));
}
/**
 * Advances to the next key/value pair. Without a graph filter this simply delegates to
 * the wrapped reader; with a filter it skips vertices the filter rejects, rewriting the
 * current value in place with the filtered star vertex.
 *
 * @return {@code true} if a (filter-surviving) vertex is available, {@code false} at end of input
 */
@Override
public boolean nextKeyValue() throws IOException, InterruptedException {
    if (null == this.graphFilter) {
        return this.recordReader.nextKeyValue();
    }
    // Keep reading until a vertex survives the filter or the underlying reader is exhausted.
    while (this.recordReader.nextKeyValue()) {
        final VertexWritable vertexWritable = this.recordReader.getCurrentValue();
        final Optional<StarGraph.StarVertex> filtered = vertexWritable.get().applyGraphFilter(this.graphFilter);
        if (filtered.isPresent()) {
            vertexWritable.set(filtered.get());
            return true;
        }
    }
    return false;
}
// Kryo deserialization: reads a serialized StarGraph and wraps its star vertex in a
// new VertexWritable. (Trailing brace closes the enclosing serializer class.)
@Override public <I extends InputShim> VertexWritable read(final KryoShim<I, ?> kryo, final I input, final Class<VertexWritable> clazz) { return new VertexWritable(kryo.readObject(input, StarGraph.class).getStarVertex()); } }
/**
 * Reads one GraphSON-encoded vertex from the next line of input and stores it as the
 * current value.
 *
 * @return {@code true} if a line was read and parsed, {@code false} at end of input
 * @throws IOException if the underlying reader or the GraphSON parser fails
 */
@Override
public boolean nextKeyValue() throws IOException {
    if (!this.lineRecordReader.nextKeyValue())
        return false;
    // Text.getBytes() exposes the reader's reused backing array, which can hold stale
    // bytes past getLength() left over from a previous, longer line. Bound the stream
    // to the valid region so the GraphSON parser never sees trailing garbage.
    try (InputStream in = new ByteArrayInputStream(
            this.lineRecordReader.getCurrentValue().getBytes(), 0,
            this.lineRecordReader.getCurrentValue().getLength())) {
        this.vertexWritable.set(this.hasEdges
                ? this.graphsonReader.readVertex(in, Attachable::get, Attachable::get, Direction.BOTH)
                : this.graphsonReader.readVertex(in, Attachable::get));
        return true;
    }
}
/**
 * Serializes one vertex as a single GraphSON line followed by a newline; a null
 * vertex is silently ignored.
 *
 * @param key    ignored (Hadoop requires a key slot; it carries no data here)
 * @param vertex the vertex to serialize, or null to skip
 */
@Override
public void write(final NullWritable key, final VertexWritable vertex) throws IOException {
    if (null == vertex)
        return;
    if (this.hasEdges) {
        graphsonWriter.writeVertex(this.outputStream, vertex.get(), Direction.BOTH);
    } else {
        graphsonWriter.writeVertex(this.outputStream, vertex.get());
    }
    // Record separator shared by both branches.
    this.outputStream.write(NEWLINE);
}
@Override public boolean nextKeyValue() throws IOException, InterruptedException { while (reader.nextKeyValue()) { // TODO titan05 integration -- the duplicate() call may be unnecessary final TinkerVertex maybeNullTinkerVertex = deser.readHadoopVertex(reader.getCurrentKey(), reader.getCurrentValue()); if (null != maybeNullTinkerVertex) { vertex = new VertexWritable(maybeNullTinkerVertex); //vertexQuery.filterRelationsOf(vertex); // TODO reimplement vertexquery filtering return true; } } return false; }