    @Override
    public Iterable<? extends String> doOperation(final GetAllGraphIds operation, final Context context, final Store store) throws OperationException {
        return ((FederatedStore) store).getAllGraphIds(context.getUser());
    }
}
public GraphRequest(final Operation operation, final User user) {
    if (null == operation) {
        throw new IllegalArgumentException("An operation is required");
    }
    if (null == user) {
        throw new IllegalArgumentException("A user is required");
    }
    this.operationChain = (OperationChain) OperationChain.wrap(operation);
    this.context = new Context(user);
}
/**
 * Logs the operation chain and the user id.
 *
 * @param opChain the operation chain being executed
 * @param context the Context executing the operation chain
 */
@Override
public void preExecute(final OperationChain<?> opChain, final Context context) {
    LOGGER.info("Running {} as {}", context.getOriginalOpChain(), context.getUser().getUserId());
}
@Override
protected GafferResultCacheExporter createExporter(final ExportToGafferResultCache export, final Context context, final Store store) {
    return new GafferResultCacheExporter(
            context,
            context.getJobId(),
            createGraph(store),
            visibility,
            export.getOpAuths());
}
/**
 * Adds a spark session to the given {@link Context}.
 *
 * @param context      the user context
 * @param sparkSession the spark session to add to the context
 */
public static void addSparkSession(final Context context, final SparkSession sparkSession) {
    context.setConfig(SPARK_CONTEXT_CONFIG_KEY, sparkSession);
}
/**
 * Extracts the {@link SparkSession} from the Context. If there is no
 * SparkSession in the Context then a new SparkSession instance is created
 * and added to the context.
 *
 * @param context    the {@link User} {@link Context}
 * @param properties the store properties - used to create a spark session if required
 * @return the {@link SparkSession}
 */
public static SparkSession getSparkSession(final Context context, final StoreProperties properties) {
    SparkSession sparkSession = (SparkSession) context.getConfig(SPARK_CONTEXT_CONFIG_KEY);
    if (null == sparkSession) {
        sparkSession = createSparkSession(properties);
        addSparkSession(context, sparkSession);
    }
    return sparkSession;
}
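// A minimal usage sketch of the lazy-creation behaviour above. It assumes the two
// helpers live in a utility class named SparkContextUtil and that storeProperties
// has been loaded elsewhere; both names are assumptions, not taken from this section.
final Context context = new Context(new User());
final SparkSession first = SparkContextUtil.getSparkSession(context, storeProperties);
final SparkSession second = SparkContextUtil.getSparkSession(context, storeProperties);
// first == second: the session built on the first call was cached on the context,
// so the second call retrieves it instead of creating another one.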
@Override
protected GafferResultCacheExporter createExporter(final GetGafferResultCacheExport export, final Context context, final Store store) {
    final String jobId = null != export.getJobId() ? export.getJobId() : context.getJobId();
    return new GafferResultCacheExporter(
            context,
            jobId,
            createGraph(store),
            visibility,
            null);
}
@Override
public CloseableIterable<? extends Element> doOperation(final GetAllElements operation, final Context context, final Store store) throws OperationException {
    return doOperation(operation, context.getUser(), (AccumuloStore) store);
}
    @Override
    public <T> T onFailure(final T result, final OperationChain<?> opChain, final Context context, final Exception e) {
        // Pass the exception as the final argument so the stack trace is logged too.
        LOGGER.warn("Failed to run {} as {}", context.getOriginalOpChain(), context.getUser().getUserId(), e);
        return result;
    }
}
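// The preExecute/onFailure methods above are GraphHook callbacks. A sketch of wiring
// such a hook into a graph, assuming a hypothetical AuditLoggingHook class containing
// those methods; schema and storeProperties are likewise assumed to exist.
final Graph graph = new Graph.Builder()
        .config(new GraphConfig.Builder()
                .graphId("example")
                .addHook(new AuditLoggingHook()) // hypothetical GraphHook implementation
                .build())
        .addSchema(schema)
        .storeProperties(storeProperties)
        .build();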
public GraphRequest(final Output<O> operation, final User user) {
    if (null == operation) {
        throw new IllegalArgumentException("An operation is required");
    }
    if (null == user) {
        throw new IllegalArgumentException("A user is required");
    }
    this.operationChain = OperationChain.wrap(operation);
    this.context = new Context(user);
}
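// A short sketch of building a request with the Output overload above. GetAllElements
// is one such Output operation; the user id is illustrative.
final GraphRequest<CloseableIterable<? extends Element>> request =
        new GraphRequest<>(new GetAllElements(), new User("alice"));
// The operation is wrapped into an OperationChain and a Context is created for the user.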
public void doOperation(final AddElementsFromHdfs operation, final Context context, final AccumuloStore store) throws OperationException {
    validateOperation(operation);

    // Default the splits file path to a job-scoped directory under the working path.
    if (null == operation.getSplitsFilePath()) {
        final String splitsFilePath = getPathWithSlashSuffix(operation.getWorkingPath()) + context.getJobId() + "/splits";
        LOGGER.info("Using working directory for splits files: {}", splitsFilePath);
        operation.setSplitsFilePath(splitsFilePath);
    }

    try {
        checkHdfsDirectories(operation, store);
    } catch (final IOException e) {
        // Include the cause so the original stack trace is preserved.
        throw new OperationException("Operation failed due to filesystem error: " + e.getMessage(), e);
    }

    // Sample the data and create split points unless the caller supplied them.
    if (!operation.isUseProvidedSplits() && needsSplitting(store)) {
        sampleAndSplit(operation, context, store);
    }

    fetchElements(operation, store);

    // Import the fetched files into Accumulo unless the skip-import option is set.
    final String skipImport = operation.getOption(AccumuloStoreConstants.ADD_ELEMENTS_FROM_HDFS_SKIP_IMPORT);
    if (null == skipImport || !"TRUE".equalsIgnoreCase(skipImport)) {
        importElements(operation, store);
    } else {
        LOGGER.info("Skipping import as {} was {}", AccumuloStoreConstants.ADD_ELEMENTS_FROM_HDFS_SKIP_IMPORT, skipImport);
    }
}
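// The import step above can be skipped via an operation option. A sketch, assuming
// the standard Operation options map is settable via setOptions; the "true" value
// matches the case-insensitive check in the handler.
final AddElementsFromHdfs operation = new AddElementsFromHdfs();
// Stage the files in HDFS now; defer the bulk import to a later step.
operation.setOptions(Collections.singletonMap(
        AccumuloStoreConstants.ADD_ELEMENTS_FROM_HDFS_SKIP_IMPORT, "true"));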
@Override
public CloseableIterable<? extends Element> doOperation(final SummariseGroupOverRanges operation, final Context context, final Store store) throws OperationException {
    return doOperation(operation, context.getUser(), (AccumuloStore) store);
}
/**
 * Create a new Context with the given user and store properties.
 *
 * @param user            the user
 * @param storeProperties the store properties
 * @return the new {@link Context}.
 */
public static Context createContext(final User user, final StoreProperties storeProperties) {
    final Context context = new Context(user);
    addSparkSession(context, storeProperties);
    return context;
}
tmpJobWorkingPath = tmpJobWorkingPath + context.getJobId();
@Override
public CloseableIterable<? extends Element> doOperation(final GetElements operation, final Context context, final Store store) throws OperationException {
    return doOperation(operation, context.getUser(), (AccumuloStore) store);
}
/**
 * Create a new Context with the given user and spark session.
 *
 * @param user         the user
 * @param sparkSession the spark session
 * @return the new {@link Context}.
 */
public static Context createContext(final User user, final SparkSession sparkSession) {
    final Context context = new Context(user);
    addSparkSession(context, sparkSession);
    return context;
}
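// A brief sketch of the two createContext overloads (this one and the StoreProperties
// variant earlier in this section), again assuming a SparkContextUtil utility class;
// user, storeProperties and sparkSession are assumed to exist.
final Context fromProperties = SparkContextUtil.createContext(user, storeProperties);
final Context fromSession = SparkContextUtil.createContext(user, sparkSession);
// Both contexts carry a SparkSession: the first creates one from the properties,
// the second attaches the session that was supplied.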
@Override
public CloseableIterable<? extends Element> doOperation(final GetElementsInRanges operation, final Context context, final Store store) throws OperationException {
    return doOperation(operation, context.getUser(), (AccumuloStore) store);
}
                .skipInvalidElements(skipInvalid)
                .build(), new Context(new User()));
        restart = true;
    } catch (final OperationException e) {
@Override
public void preExecute(final OperationChain<?> opChain, final Context context) {
    resolveNamedOperations(opChain, context.getUser());
}
public GafferResultCacheExporter(final Context context,
                                 final String jobId,
                                 final Graph resultCache,
                                 final String visibility,
                                 final Set<String> requiredOpAuths) {
    this.context = context;
    this.jobId = jobId;
    this.resultCache = resultCache;
    this.visibility = visibility;

    // If no op auths are supplied, default to the exporting user's id so only
    // that user can read the exported results back.
    if (null == requiredOpAuths) {
        this.requiredOpAuths = CollectionUtil.treeSet(context.getUser().getUserId());
    } else {
        this.requiredOpAuths = new TreeSet<>(requiredOpAuths);
    }

    // The user's effective op auths always include their own user id.
    userOpAuths = new HashSet<>(context.getUser().getOpAuths());
    userOpAuths.add(context.getUser().getUserId());
}
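// A short illustration of the requiredOpAuths defaulting above; context, jobId,
// resultCache and visibility are assumed to be in scope, and the auth name is
// illustrative.
// Passing null restricts the exported results to the exporting user's own id:
final GafferResultCacheExporter privateExport =
        new GafferResultCacheExporter(context, jobId, resultCache, visibility, null);
// Passing explicit op auths instead restricts reads to users holding those auths:
final GafferResultCacheExporter sharedExport =
        new GafferResultCacheExporter(context, jobId, resultCache, visibility,
                Collections.singleton("ExportReader"));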