    @Override
    public Iterable<? extends String> doOperation(final GetAllGraphIds operation, final Context context, final Store store) throws OperationException {
        return ((FederatedStore) store).getAllGraphIds(context.getUser());
    }
}
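// Illustrative usage of the handler above (not from the original source): listing the
// graph ids visible to a user through a Graph instance backed by a FederatedStore.
// The method and variable names are hypothetical.
private Iterable<? extends String> exampleListGraphIds(final Graph federatedGraph, final User user) throws OperationException {
    return federatedGraph.execute(new GetAllGraphIds(), user);
}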
@Override
public CloseableIterable<? extends Element> doOperation(final GetAllElements operation, final Context context, final Store store) throws OperationException {
    return doOperation(operation, context.getUser(), (AccumuloStore) store);
}

@Override
public CloseableIterable<? extends Element> doOperation(final SummariseGroupOverRanges operation, final Context context, final Store store) throws OperationException {
    return doOperation(operation, context.getUser(), (AccumuloStore) store);
}

@Override
public CloseableIterable<? extends Element> doOperation(final GetElements operation, final Context context, final Store store) throws OperationException {
    return doOperation(operation, context.getUser(), (AccumuloStore) store);
}

@Override
public CloseableIterable<? extends Element> doOperation(final GetElementsInRanges operation, final Context context, final Store store) throws OperationException {
    return doOperation(operation, context.getUser(), (AccumuloStore) store);
}
@Override
public void preExecute(final OperationChain<?> opChain, final Context context) {
    resolveNamedOperations(opChain, context.getUser());
}
/**
 * Logs the operation chain and the user id.
 *
 * @param opChain the operation chain being executed
 * @param context the Context executing the operation chain
 */
@Override
public void preExecute(final OperationChain<?> opChain, final Context context) {
    LOGGER.info("Running {} as {}", context.getOriginalOpChain(), context.getUser().getUserId());
}
    @Override
    public <T> T onFailure(final T result, final OperationChain<?> opChain, final Context context, final Exception e) {
        LOGGER.warn("Failed to run {} as {}", context.getOriginalOpChain(), context.getUser().getUserId());
        return result;
    }
}
/**
 * @param config  configuration containing optional graphIds
 * @param context the user context to match visibility against.
 * @return merged schema of the visible graphs.
 */
public Schema getSchema(final Map<String, String> config, final Context context) {
    if (null == context) {
        // No Context supplied, so return an empty Schema.
        return new Schema();
    }
    return getSchema(config, context.getUser());
}
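// A minimal usage sketch for the method above (not from the original source), assuming a
// FederatedStore instance and a Context holding an authenticated User. The option key used
// to restrict the graph ids is an assumption for illustration.
private Schema exampleGetMergedSchema(final FederatedStore federatedStore, final Context context) {
    final Map<String, String> config = new HashMap<>();
    config.put("gaffer.federatedstore.operation.graphIds", "graphA,graphB"); // hypothetical key and graph ids
    return federatedStore.getSchema(config, context);
}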
@Override
public void preExecute(final OperationChain<?> opChain, final Context context) {
    if (applyToUser(context.getUser())) {
        updateView(opChain);
    }
}
@Override
@SuppressWarnings("unchecked")
public void add(final String key, final Iterable<?> elements) throws OperationException {
    if (null == elements) {
        return;
    }
    // The key is not used here; the elements are added directly to the graph as the context user.
    graph.execute(new AddElements.Builder()
            .input((Iterable<Element>) elements)
            .build(), context.getUser());
}
    public CloseableIterable<? extends EntityId> doOperation(final GetAdjacentIds op, final User user, final AccumuloStore store) throws OperationException {
        try {
            return new AccumuloAdjacentIdRetriever(store, op, user);
        } catch (final IteratorSettingException | StoreException e) {
            throw new OperationException(e.getMessage(), e);
        }
    }
}
    @Override
    public Void doOperation(final RemoveGraph operation, final Context context, final Store store) throws OperationException {
        try {
            ((FederatedStore) store).remove(operation.getGraphId(), context.getUser());
        } catch (final Exception e) {
            throw new OperationException("Error removing graph: " + operation.getGraphId(), e);
        }
        return null;
    }
}
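// Illustrative usage of the handler above (not from the original source): removing a
// sub-graph from a federated Graph. Assumes a Graph instance backed by a FederatedStore,
// a User permitted to remove the graph, and a hypothetical graph id "graphA".
private void exampleRemoveGraph(final Graph federatedGraph, final User user) throws OperationException {
    federatedGraph.execute(new RemoveGraph.Builder()
            .graphId("graphA") // hypothetical graph id
            .build(), user);
}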
    private CloseableIterable<Element> doOperation(final GetAllElements operation, final ParquetStore store, final User user) throws OperationException {
        try {
            return new ParquetElementRetriever(operation.getView(), store, operation.getDirectedType(), null, null, null, user);
        } catch (final StoreException e) {
            throw new OperationException("Failed to get elements", e);
        }
    }
}
private RDD<Element> doOperationUsingElementInputFormat(final GetRDDOfAllElements operation, final Context context, final AccumuloStore accumuloStore) throws OperationException {
    final Configuration conf = getConfiguration(operation);
    addIterators(accumuloStore, conf, context.getUser(), operation);
    final String useBatchScannerRDD = operation.getOption(USE_BATCH_SCANNER_RDD);
    if (Boolean.parseBoolean(useBatchScannerRDD)) {
        InputConfigurator.setBatchScan(AccumuloInputFormat.class, conf, true);
    }
    final RDD<Tuple2<Element, NullWritable>> pairRDD = SparkContextUtil.getSparkSession(context, accumuloStore.getProperties())
            .sparkContext()
            .newAPIHadoopRDD(conf, ElementInputFormat.class, Element.class, NullWritable.class);
    return pairRDD.map(new FirstElement(), ELEMENT_CLASS_TAG);
}
public CloseableIterable<? extends Element> doOperation(final GetElementsBetweenSets operation, final User user, final AccumuloStore store) throws OperationException {
    try {
        final IteratorSettingFactory iteratorFactory = store.getKeyPackage().getIteratorFactory();
        return new AccumuloIDBetweenSetsRetriever(store, operation, user,
                iteratorFactory.getElementPreAggregationFilterIteratorSetting(operation.getView(), store),
                iteratorFactory.getElementPostAggregationFilterIteratorSetting(operation.getView(), store),
                iteratorFactory.getEdgeEntityDirectionFilterIteratorSetting(operation),
                iteratorFactory.getQueryTimeAggregatorIteratorSetting(operation.getView(), store));
    } catch (final IteratorSettingException | StoreException e) {
        throw new OperationException("Failed to get elements", e);
    }
}

public CloseableIterable<? extends Element> doOperation(final GetElementsWithinSet operation, final User user, final AccumuloStore store) throws OperationException {
    try {
        final IteratorSettingFactory iteratorFactory = store.getKeyPackage().getIteratorFactory();
        return new AccumuloIDWithinSetRetriever(store, operation, user,
                iteratorFactory.getElementPreAggregationFilterIteratorSetting(operation.getView(), store),
                iteratorFactory.getElementPostAggregationFilterIteratorSetting(operation.getView(), store),
                iteratorFactory.getEdgeEntityDirectionFilterIteratorSetting(operation),
                iteratorFactory.getQueryTimeAggregatorIteratorSetting(operation.getView(), store));
    } catch (final IteratorSettingException | StoreException e) {
        throw new OperationException("Failed to get elements", e);
    }
}
    @Override
    protected OtherGraphExporter createExporter(final ExportToOtherAuthorisedGraph export, final Context context, final Store store) {
        return new OtherGraphExporter(context, new AuthorisedGraphForExportDelegate.Builder()
                .store(store)
                .graphId(export.getGraphId())
                .parentSchemaIds(export.getParentSchemaIds())
                .parentStorePropertiesId(export.getParentStorePropertiesId())
                .idAuths(idAuths)
                .user(context.getUser())
                .build());
    }
}
private JavaRDD<Element> doOperation(final GetJavaRDDOfElementsInRanges operation, final Context context, final AccumuloStore accumuloStore) throws OperationException {
    final JavaSparkContext sparkContext = JavaSparkContext.fromSparkContext(SparkContextUtil.getSparkSession(context, accumuloStore.getProperties()).sparkContext());
    final Configuration conf = getConfiguration(operation);
    // Use batch scan option when performing seeded operation
    InputConfigurator.setBatchScan(AccumuloInputFormat.class, conf, true);
    addIterators(accumuloStore, conf, context.getUser(), operation);
    addRangesFromPairs(accumuloStore, conf, operation);
    final JavaPairRDD<Element, NullWritable> pairRDD = sparkContext.newAPIHadoopRDD(conf, ElementInputFormat.class, Element.class, NullWritable.class);
    final JavaRDD<Element> rdd = pairRDD.map(new FirstElement());
    return rdd;
}

private JavaRDD<Element> doOperation(final GetJavaRDDOfElements operation, final Context context, final AccumuloStore accumuloStore) throws OperationException {
    final JavaSparkContext sparkContext = JavaSparkContext.fromSparkContext(SparkContextUtil.getSparkSession(context, accumuloStore.getProperties()).sparkContext());
    final Configuration conf = getConfiguration(operation);
    // Use batch scan option when performing seeded operation
    InputConfigurator.setBatchScan(AccumuloInputFormat.class, conf, true);
    addIterators(accumuloStore, conf, context.getUser(), operation);
    addRanges(accumuloStore, conf, operation);
    final JavaPairRDD<Element, NullWritable> pairRDD = sparkContext.newAPIHadoopRDD(conf, ElementInputFormat.class, Element.class, NullWritable.class);
    final JavaRDD<Element> rdd = pairRDD.map(new FirstElement());
    return rdd;
}
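// Illustrative usage of the handlers above (not from the original source): running
// GetJavaRDDOfElements through a Graph backed by an AccumuloStore. Assumes a SparkSession
// has been registered on the Context and that "vertex1" is a hypothetical seed vertex.
private void exampleGetJavaRDD(final Graph graph, final User user) throws OperationException {
    final JavaRDD<Element> results = graph.execute(new GetJavaRDDOfElements.Builder()
            .input(new EntitySeed("vertex1")) // hypothetical seed
            .build(), user);
    results.collect().forEach(System.out::println);
}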