/**
 * Creates a {@code FederatedOperationChain} wrapping a new, empty {@link OperationChain}.
 */
public FederatedOperationChain() { this(new OperationChain()); }
/**
 * Checks whether the supplied chain contains at least one {@link FederatedOperation}.
 *
 * @param operationChain the chain to inspect
 * @return true if any operation in the chain is a {@link FederatedOperation}
 */
static boolean hasFederatedOperations(final OperationChain<?> operationChain) {
    return operationChain.getOperations()
            .stream()
            .anyMatch(op -> op instanceof FederatedOperation);
}
}
/**
 * Closes the wrapped operation chain, releasing any resources it holds.
 *
 * @throws IOException if the wrapped chain fails to close
 */
@Override public void close() throws IOException { operationChain.close(); }
/**
 * Rewrites the chain before execution: every operation that carries a View is
 * followed by the extra migration operations produced for it. Does nothing when
 * no edge or entity migrations are configured.
 */
@Override
public void preExecute(final OperationChain<?> opChain, final Context context) {
    if (edges.isEmpty() && entities.isEmpty()) {
        // Nothing to migrate - leave the chain untouched.
        return;
    }
    final List<Operation> rewritten = new ArrayList<>();
    // Copy the flattened view to avoid iterating a list we are about to replace.
    for (final Operation current : new ArrayList<>(opChain.flatten())) {
        rewritten.add(current);
        if (OperationView.hasView(current)) {
            rewritten.addAll(migrateOperation(current));
        }
    }
    opChain.updateOperations(rewritten);
}
/**
 * Creates a shallow clone: the wrapped chain is shallow-cloned while the
 * options and input are carried across by reference.
 *
 * @return a new {@code FederatedOperationChain} mirroring this one
 * @throws CloneFailedException if the wrapped chain cannot be cloned
 */
public FederatedOperationChain<I, O_ITEM> shallowClone() throws CloneFailedException {
    final FederatedOperationChain.Builder<I, O_ITEM> builder = new FederatedOperationChain.Builder<I, O_ITEM>();
    builder.operationChain(operationChain.shallowClone());
    builder.options(options);
    builder.input(getInput());
    return builder.build();
}
/**
 * Merges the per-graph results of a federated chain execution into one iterable.
 * Returns null when the chain's output type is Void or when nothing was collected.
 */
protected CloseableIterable<O_ITEM> mergeResults(final List<Object> results, final FederatedOperationChain<I, O_ITEM> operation, final Context context, final Store store) {
    // A Void-output chain has no result to expose.
    if (Void.class.equals(operation.getOperationChain().getOutputClass())) {
        return null;
    }
    if (results.isEmpty()) {
        return null;
    }
    // A single iterable result can be wrapped directly without chaining.
    if (results.size() == 1 && results.get(0) instanceof Iterable) {
        return new WrappedCloseableIterable((Iterable) results.get(0));
    }
    final boolean everyResultIterable = results.stream().allMatch(r -> r instanceof Iterable);
    if (everyResultIterable) {
        // Multiple iterables are concatenated lazily.
        return new ChainedIterable(CollectionUtil.toIterableArray((List) results));
    }
    // Mixed/scalar results: expose the raw list as a closeable iterable.
    return new WrappedCloseableIterable(results);
}
}
opChain.updateOperations(newOpList); } catch (final Exception e) {
/**
 * Executes an operation chain remotely by POSTing it, serialised as JSON, to the
 * configured Gaffer REST endpoint ({@code graph/operations/execute}).
 *
 * @param opChain the chain to execute
 * @param context the execution context
 * @param <O>     the chain's output type
 * @return the deserialised result from the remote Gaffer instance
 * @throws OperationException if serialisation fails or the remote call fails
 */
public <O> O executeOpChainViaUrl(final OperationChain<O> opChain, final Context context) throws OperationException {
    final String opChainJson;
    try {
        // Use the Charset constant rather than a charset-name lookup: this cannot fail,
        // which removes the impossible UnsupportedEncodingException path entirely.
        opChainJson = new String(JSONSerialiser.serialise(opChain), java.nio.charset.StandardCharsets.UTF_8);
    } catch (final SerialisationException e) {
        throw new OperationException("Unable to serialise operation chain into JSON.", e);
    }
    final URL url = getProperties().getGafferUrl("graph/operations/execute");
    try {
        return doPost(url, opChainJson, opChain.getOutputTypeReference(), context);
    } catch (final StoreException e) {
        throw new OperationException(e.getMessage(), e);
    }
}
/**
 * Merges this hook's configured view into every view-carrying operation in the chain,
 * applying white/black-list group filtering and (optionally) removing groups that were
 * not present in the operation's original view.
 */
private void updateView(final OperationChain<?> opChain) {
    for (final Operation operation : opChain.flatten()) {
        if (operation instanceof OperationView) {
            final OperationView operationView = (OperationView) operation;
            final View.Builder viewBuilder = mergeView(operationView, getViewToMerge());
            // Only apply group filtering when at least one list is configured.
            if ((null != whiteListElementGroups && !whiteListElementGroups.isEmpty())
                    || (null != blackListElementGroups && !blackListElementGroups.isEmpty())) {
                viewBuilder.removeEntities(this::removeElementGroups);
                viewBuilder.removeEdges(this::removeElementGroups);
            }
            // Unless extra groups are allowed, drop any group the operation's original
            // view did not already contain (a null group set removes everything).
            if (!addExtraGroups && null != operationView.getView()) {
                final Set<String> entityGroups = operationView.getView().getEntityGroups();
                viewBuilder.removeEntities(grp -> null == entityGroups || !entityGroups.contains(grp.getKey()));
                final Set<String> edgeGroups = operationView.getView().getEdgeGroups();
                viewBuilder.removeEdges(grp -> null == edgeGroups || !edgeGroups.contains(grp.getKey()));
            }
            viewBuilder.expandGlobalDefinitions();
            operationView.setView(viewBuilder.build());
        }
    }
}
/**
 * Executes the wrapped operation chain against every graph the user can access
 * (optionally filtered by the graph-ids option) and merges the per-graph results.
 * Failures on an individual graph either abort the whole operation or are skipped,
 * depending on the skip-failed option.
 */
@Override public CloseableIterable<O_ITEM> doOperation(final FederatedOperationChain<I, O_ITEM> operation, final Context context, final Store store) throws OperationException {
    final Collection<Graph> graphs = ((FederatedStore) store).getGraphs(context.getUser(), operation.getOption(KEY_OPERATION_OPTIONS_GRAPH_IDS));
    final List<Object> results = new ArrayList<>(graphs.size());
    for (final Graph graph : graphs) {
        final OperationChain opChain = operation.getOperationChain();
        OperationHandlerUtil.updateOperationInput(opChain, operation.getInput());
        // updateOperationForGraph returns null when the chain is not applicable to this graph.
        final OperationChain updatedOp = FederatedStoreUtil.updateOperationForGraph(opChain, graph);
        if (null != updatedOp) {
            Object result = null;
            try {
                result = graph.execute(updatedOp, context);
            } catch (final Exception e) {
                // NOTE(review): the skip option is read from updatedOp, not the original
                // operation - confirm the option is propagated by updateOperationForGraph.
                if (!Boolean.valueOf(updatedOp.getOption(KEY_SKIP_FAILED_FEDERATED_STORE_EXECUTE))) {
                    throw new OperationException(FederatedStoreUtil.createOperationErrorMsg(operation, graph.getGraphId(), e), e);
                }
            }
            // Null results (e.g. from skipped failures) are not merged.
            if (null != result) {
                results.add(result);
            }
        }
    }
    return mergeResults(results, operation, context, store);
}
final OperationChain clonedOpChain = request.getOperationChain().shallowClone(); O result = null; try {
/**
 * Creates a {@code FederatedOperationChain} wrapping the given operations in a new chain.
 *
 * @param operations the operations to wrap
 */
public FederatedOperationChain(final Operation... operations) { this(new OperationChain(operations)); }
/**
 * Injects the input of the NamedOperation into the first operation in the OperationChain. This is used when
 * chaining NamedOperations together. The input is only injected when the first operation accepts an
 * input and does not already have one.
 *
 * @param opChain the resolved operation chain
 * @param input   the input of the NamedOperation
 */
private void updateOperationInput(final OperationChain<?> opChain, final Object input) {
    // Guard against an empty resolved chain - get(0) would otherwise throw
    // IndexOutOfBoundsException.
    if (null == input || opChain.getOperations().isEmpty()) {
        return;
    }
    final Operation firstOp = opChain.getOperations().get(0);
    // Never overwrite an input the operation already carries.
    if (firstOp instanceof Input && null == ((Input) firstOp).getInput()) {
        ((Input) firstOp).setInput(input);
    }
}
}
/**
 * Creates a builder initialised with an empty {@code FederatedOperationChain}.
 */
public Builder() { super(new FederatedOperationChain<>(new OperationChain())); }
/**
 * Checks the {@link Operation}s in the provided {@link OperationChain}
 * are allowed to be executed by the user.
 * This is done by checking the user's auths against the operation auths.
 * If an operation cannot be executed then an {@link IllegalAccessError} is thrown.
 *
 * @param context the user to authorise.
 * @param opChain the operation chain.
 */
@Override
public void preExecute(final OperationChain<?> opChain, final Context context) {
    if (null == opChain) {
        return;
    }
    // Authorise each operation individually, then the chain as a whole.
    opChain.getOperations().forEach(operation -> authorise(operation, context.getUser()));
    authorise(opChain, context.getUser());
}
/**
 * Migrates the view of a single view-carrying operation: entity and edge definitions
 * are rewritten according to the configured migrations, the updated view is set back
 * on the operation, and any follow-up operations needed to complete the migration
 * (e.g. post-transform filtering/aggregation) are returned for insertion after it.
 */
private List<Operation> migrateOperation(final Operation op) {
    final OperationView opView = OperationView.class.cast(op);
    final Map<String, ViewMigration> migratedEntities = migrateViewElements(entities, opView.getView()::getEntity);
    final Map<String, ViewMigration> migratedEdges = migrateViewElements(edges, opView.getView()::getEdge);
    // Rebuild the view, replacing each migrated group's definition.
    final View.Builder viewBuilder = new View.Builder().merge(opView.getView());
    for (final Map.Entry<String, ViewMigration> entry : migratedEntities.entrySet()) {
        viewBuilder.entity(entry.getKey(), entry.getValue().buildViewElementDefinition());
    }
    for (final Map.Entry<String, ViewMigration> entry : migratedEdges.entrySet()) {
        viewBuilder.edge(entry.getKey(), entry.getValue().buildViewElementDefinition());
    }
    // The migrated view may be transitional, so normal view validation is skipped.
    viewBuilder.config(ViewValidator.SKIP_VIEW_VALIDATION, TRUE);
    final View updatedView = viewBuilder.build();
    LOGGER.debug("Migrated view: {}", updatedView);
    opView.setView(updatedView);
    final List<Operation> migrationOps = ViewMigration.createMigrationOps(aggregateAfter, migratedEdges.values(), migratedEntities.values());
    // Serialisation here is purely for debug logging; failures are logged, not thrown.
    if (LOGGER.isDebugEnabled()) {
        try {
            LOGGER.debug("Migrated operations: {}", StringUtil.toString(JSONSerialiser.serialise(new OperationChain<>(migrationOps), true)));
        } catch (final SerialisationException e) {
            LOGGER.debug("Failed to json serialise the migration operations: {}", new OperationChain<>(migrationOps));
        }
    }
    return migrationOps;
}
private List<Operation> resolveNamedOperation(final NamedOperation namedOp, final User user) { final NamedOperationDetail namedOpDetail; try { namedOpDetail = cache.getNamedOperation(namedOp.getOperationName(), user); } catch (final CacheOperationFailedException e) { // Unable to find named operation - just return the original named operation return Collections.singletonList(namedOp); } final OperationChain<?> namedOperationChain = namedOpDetail.getOperationChain(namedOp.getParameters()); updateOperationInput(namedOperationChain, namedOp.getInput()); // Call resolveNamedOperations again to check there are no nested named operations resolveNamedOperations(namedOperationChain, user); return namedOperationChain.getOperations(); }
/**
 * Prepares key/value files for the operation and bulk-imports them into Accumulo.
 * Requires both the outputPath and failurePath options to be set.
 *
 * @throws OperationException if either required path option is missing or empty
 */
public void doOperation(final OP operation, final Context context, final AccumuloStore store) throws OperationException {
    final String outputPath = getOutputPath(operation);
    final String failurePath = getFailurePath(operation);
    if (null == outputPath || outputPath.isEmpty()) {
        throw new OperationException("Option outputPath must be set for this option to be run against the accumulostore");
    }
    if (null == failurePath || failurePath.isEmpty()) {
        throw new OperationException("Option failurePath must be set for this option to be run against the accumulostore");
    }
    // Partition the key/values to match the store's tablet boundaries before importing.
    prepareKeyValues(operation, new AccumuloKeyRangePartitioner(store));
    final ImportAccumuloKeyValueFiles importFiles = new ImportAccumuloKeyValueFiles.Builder()
            .inputPath(outputPath)
            .failurePath(failurePath)
            .build();
    store.execute(new OperationChain<>(importFiles), context);
}
/**
 * Imports an RDD of Elements into Accumulo: elements are converted to Accumulo
 * key/value pairs via a broadcast element converter, then delegated to the
 * key/value-pair import handler. Requires the outputPath and failurePath options.
 */
public void doOperation(final ImportRDDOfElements operation, final Context context, final AccumuloStore store) throws OperationException {
    final String outputPath = operation.getOption(OUTPUT_PATH);
    if (null == outputPath || outputPath.isEmpty()) {
        throw new OperationException("Option outputPath must be set for this option to be run against the accumulostore");
    }
    final String failurePath = operation.getOption(FAILURE_PATH);
    if (null == failurePath || failurePath.isEmpty()) {
        throw new OperationException("Option failurePath must be set for this option to be run against the accumulostore");
    }
    // Broadcast the element converter so executors convert elements without re-serialising it per task.
    final ElementConverterFunction func = new ElementConverterFunction(SparkContextUtil.getSparkSession(context, store.getProperties()).sparkContext().broadcast(store.getKeyPackage().getKeyConverter(), ACCUMULO_ELEMENT_CONVERTER_CLASS_TAG));
    final RDD<Tuple2<Key, Value>> rdd = operation.getInput().flatMap(func, TUPLE2_CLASS_TAG);
    final ImportKeyValuePairRDDToAccumulo op = new ImportKeyValuePairRDDToAccumulo.Builder()
            .input(rdd)
            .failurePath(failurePath)
            .outputPath(outputPath)
            .build();
    store.execute(new OperationChain<>(op), context);
}
}
/**
 * Imports a JavaRDD of Elements into Accumulo: elements are converted to Accumulo
 * key/value pairs via a broadcast element converter, then delegated to the
 * key/value-pair import handler. Requires the outputPath and failurePath options.
 *
 * @throws OperationException if either required path option is missing or empty
 */
public void doOperation(final ImportJavaRDDOfElements operation, final Context context, final AccumuloStore store) throws OperationException {
    final String outputPath = operation.getOption(OUTPUT_PATH);
    if (null == outputPath || outputPath.isEmpty()) {
        throw new OperationException("Option outputPath must be set for this option to be run against the accumulostore");
    }
    final String failurePath = operation.getOption(FAILURE_PATH);
    if (null == failurePath || failurePath.isEmpty()) {
        throw new OperationException("Option failurePath must be set for this option to be run against the accumulostore");
    }
    final SparkContext sparkContext = SparkContextUtil.getSparkSession(context, store.getProperties()).sparkContext();
    // Broadcast the element converter so executors convert elements without re-serialising it per task.
    final Broadcast<AccumuloElementConverter> broadcast = JavaSparkContext.fromSparkContext(sparkContext).broadcast(store.getKeyPackage().getKeyConverter());
    final ElementConverterFunction func = new ElementConverterFunction(broadcast);
    final JavaPairRDD<Key, Value> rdd = operation.getInput().flatMapToPair(func);
    final ImportKeyValueJavaPairRDDToAccumulo op = new ImportKeyValueJavaPairRDDToAccumulo.Builder()
            .input(rdd)
            .failurePath(failurePath)
            .outputPath(outputPath)
            .build();
    // Use the diamond operator (consistent with the RDD handler) instead of a raw OperationChain.
    store.execute(new OperationChain<>(op), context);
}
}