/**
 * Sets up per-vertex state storage for a Fulgora OLAP pass.
 *
 * @param numVertices   expected vertex count, used to presize the state map (must be >= 0)
 * @param idManager     Titan id manager (non-null)
 * @param vertexProgram program whose element compute keys define the key mapping (non-null)
 */
public FulgoraVertexMemory(int numVertices, final IDManager idManager, final VertexProgram<M> vertexProgram) {
    Preconditions.checkArgument(numVertices >= 0 && vertexProgram != null && idManager != null);
    this.idManager = idManager;
    this.combiner = FulgoraUtil.getMessageCombiner(vertexProgram);
    this.elementKeyMap = getIdMap(vertexProgram.getElementComputeKeys());
    this.previousScopes = ImmutableMap.of();
    // Lock-free maps: presize vertex state to the expected count; 64 is the fixed
    // initial capacity for the partitioned-vertex map.
    vertexStates = new NonBlockingHashMapLong<>(numVertices);
    partitionVertices = new NonBlockingHashMapLong<>(64);
}
@Override public RecordWriter<NullWritable, VertexWritable> getRecordWriter(TaskAttemptContext taskAttemptContext) throws IOException, InterruptedException { synchronized (this) { if (null == graph) { Configuration hadoopConf = taskAttemptContext.getConfiguration(); ModifiableHadoopConfiguration mhc = ModifiableHadoopConfiguration.of(TitanHadoopConfiguration.MAPRED_NS, hadoopConf); graph = (StandardTitanGraph) TitanFactory.open(mhc.getTitanGraphConf()); } } // Special case for a TP3 vertex program: persist only those properties whose keys are // returned by VertexProgram.getComputeKeys() if (null == persistableKeys) { try { persistableKeys = VertexProgram.createVertexProgram(graph, ConfUtil.makeApacheConfiguration(taskAttemptContext.getConfiguration())).getElementComputeKeys(); log.debug("Set persistableKeys={}", Joiner.on(",").join(persistableKeys)); } catch (Exception e) { log.debug("Unable to detect or instantiate vertex program", e); persistableKeys = ImmutableSet.of(); } } StandardTitanTx tx = transactions.computeIfAbsent(taskAttemptContext.getTaskAttemptID(), id -> (StandardTitanTx)graph.newTransaction()); return new TitanH1RecordWriter(taskAttemptContext, tx, persistableKeys); }
// Decide how computed element properties are persisted after the OLAP job.
if (persistMode == Persist.NOTHING && resultGraphMode == ResultGraph.NEW) {
    // Nothing to persist and caller wants a fresh result graph: return an empty graph.
    resultgraph = EmptyGraph.instance();
} else if (persistMode != Persist.NOTHING && vertexProgram != null && !vertexProgram.getElementComputeKeys().isEmpty()) {
    // Persist mutated properties back into the graph, batched across worker threads.
    for (String key : vertexProgram.getElementComputeKeys()) {
        if (!mgmt.containsPropertyKey(key))
            log.warn("Property key [{}] is not part of the schema and will be created. It is advised to initialize all keys.", key);
        // NOTE(review): in this fragment the declarations below sit inside the
        // compute-key loop, which would recreate the WorkerPool once per key —
        // confirm against the full file whether the loop closes before this point.
        AtomicInteger failures = new AtomicInteger(0);
        try (WorkerPool workers = new WorkerPool(numThreads)) {
            // Each batch holds this key-count's share of the configured write batch size.
            List<Map.Entry<Long, Map<String, Object>>> subset = new ArrayList<>(writeBatchSize / vertexProgram.getElementComputeKeys().size());
            int currentSize = 0;
            for (Map.Entry<Long, Map<String, Object>> entry : mutatedProperties.entrySet()) {
// Builds vertex memory for a Fulgora OLAP computation: lock-free state maps plus
// the message combiner and compute-key mapping derived from the vertex program.
public FulgoraVertexMemory(int numVertices, final IDManager idManager, final VertexProgram<M> vertexProgram) {
    // Guard: non-negative expected size and non-null collaborators.
    Preconditions.checkArgument(numVertices>=0 && vertexProgram!=null && idManager!=null);
    // Presize to the expected vertex count; 64 is a fixed initial capacity for partitioned vertices.
    vertexStates = new NonBlockingHashMapLong<>(numVertices);
    partitionVertices = new NonBlockingHashMapLong<>(64);
    this.idManager = idManager;
    this.combiner = FulgoraUtil.getMessageCombiner(vertexProgram);
    this.elementKeyMap = getIdMap(vertexProgram.getElementComputeKeys());
    this.previousScopes = ImmutableMap.of();
}
@Override public RecordWriter<NullWritable, VertexWritable> getRecordWriter(TaskAttemptContext taskAttemptContext) throws IOException, InterruptedException { synchronized (this) { if (null == graph) { Configuration hadoopConf = taskAttemptContext.getConfiguration(); ModifiableHadoopConfiguration mhc = ModifiableHadoopConfiguration.of(TitanHadoopConfiguration.MAPRED_NS, hadoopConf); graph = (StandardTitanGraph) TitanFactory.open(mhc.getTitanGraphConf()); } } // Special case for a TP3 vertex program: persist only those properties whose keys are // returned by VertexProgram.getComputeKeys() if (null == persistableKeys) { try { persistableKeys = VertexProgram.createVertexProgram(graph, ConfUtil.makeApacheConfiguration(taskAttemptContext.getConfiguration())).getElementComputeKeys(); log.debug("Set persistableKeys={}", Joiner.on(",").join(persistableKeys)); } catch (Exception e) { log.debug("Unable to detect or instantiate vertex program", e); persistableKeys = ImmutableSet.of(); } } StandardTitanTx tx = transactions.computeIfAbsent(taskAttemptContext.getTaskAttemptID(), id -> (StandardTitanTx)graph.newTransaction()); return new TitanH1RecordWriter(taskAttemptContext, tx, persistableKeys); }
@Override public RecordWriter<NullWritable, VertexWritable> getRecordWriter(TaskAttemptContext taskAttemptContext) throws IOException, InterruptedException { synchronized (this) { if (null == graph) { Configuration hadoopConf = taskAttemptContext.getConfiguration(); ModifiableHadoopConfiguration mhc = ModifiableHadoopConfiguration.of(TitanHadoopConfiguration.MAPRED_NS, hadoopConf); graph = (StandardTitanGraph) TitanFactory.open(mhc.getTitanGraphConf()); } } // Special case for a TP3 vertex program: persist only those properties whose keys are // returned by VertexProgram.getComputeKeys() if (null == persistableKeys) { try { persistableKeys = VertexProgram.createVertexProgram(graph, ConfUtil.makeApacheConfiguration(taskAttemptContext.getConfiguration())).getElementComputeKeys(); log.debug("Set persistableKeys={}", Joiner.on(",").join(persistableKeys)); } catch (Exception e) { log.debug("Unable to detect or instantiate vertex program", e); persistableKeys = ImmutableSet.of(); } } StandardTitanTx tx = transactions.computeIfAbsent(taskAttemptContext.getTaskAttemptID(), id -> (StandardTitanTx)graph.newTransaction()); return new TitanH1RecordWriter(taskAttemptContext, tx, persistableKeys); }
// Decide how computed element properties are persisted after the OLAP job.
if (persistMode == Persist.NOTHING && resultGraphMode == ResultGraph.NEW) {
    // Nothing to persist and caller wants a fresh result graph: return an empty graph.
    resultgraph = EmptyGraph.instance();
} else if (persistMode != Persist.NOTHING && vertexProgram != null && !vertexProgram.getElementComputeKeys().isEmpty()) {
    // Persist mutated properties back into the graph, batched across worker threads.
    for (String key : vertexProgram.getElementComputeKeys()) {
        if (!mgmt.containsPropertyKey(key))
            log.warn("Property key [{}] is not part of the schema and will be created. It is advised to initialize all keys.", key);
        // NOTE(review): in this fragment the declarations below sit inside the
        // compute-key loop, which would recreate the WorkerPool once per key —
        // confirm against the full file whether the loop closes before this point.
        AtomicInteger failures = new AtomicInteger(0);
        try (WorkerPool workers = new WorkerPool(numThreads)) {
            // Each batch holds this key-count's share of the configured write batch size.
            List<Map.Entry<Long, Map<String, Object>>> subset = new ArrayList<>(writeBatchSize / vertexProgram.getElementComputeKeys().size());
            int currentSize = 0;
            for (Map.Entry<Long, Map<String, Object>> entry : mutatedProperties.entrySet()) {