@Override
public Schema resolveSchemaForGraph(final Store store, final Schema schema, final List<String> parentSchemaIds, final Pair<Schema, StoreProperties> existingGraphPair) {
    Schema resultSchema = super.resolveSchemaForGraph(store, schema, parentSchemaIds, existingGraphPair);
    if (null == resultSchema) {
        // If no schemas have been provided then default to using the store schema
        resultSchema = store.getSchema();
    }
    return resultSchema;
}
@Override
protected Schema resolveSchemaForGraph(final Store store, final Schema schema, final List<String> parentSchemaIds, final Pair<Schema, StoreProperties> existingGraphPair) {
    Schema resultSchema = super.resolveSchemaForGraph(store, schema, parentSchemaIds, existingGraphPair);
    if (null == resultSchema) {
        // If no schemas have been provided then default to using the store schema
        resultSchema = store.getSchema();
    }
    return resultSchema;
}
@Override
public JobConf createJobConf(final SampleDataForSplitPoints operation, final String mapperGeneratorClassName, final Store store) throws IOException {
    final JobConf jobConf = new JobConf(new Configuration());

    LOGGER.info("Setting up job conf");
    jobConf.set(SCHEMA, new String(store.getSchema().toCompactJson(), CommonConstants.UTF_8));
    LOGGER.info("Added {} {} to job conf", SCHEMA, new String(store.getSchema().toCompactJson(), CommonConstants.UTF_8));
    jobConf.set(MAPPER_GENERATOR, mapperGeneratorClassName);
    LOGGER.info("Added {} of {} to job conf", MAPPER_GENERATOR, mapperGeneratorClassName);
    jobConf.set(VALIDATE, String.valueOf(operation.isValidate()));
    LOGGER.info("Added {} option of {} to job conf", VALIDATE, operation.isValidate());
    jobConf.set(PROPORTION_TO_SAMPLE, String.valueOf(operation.getProportionToSample()));
    LOGGER.info("Added {} option of {} to job conf", PROPORTION_TO_SAMPLE, String.valueOf(operation.getProportionToSample()));

    final Integer numTasks = operation.getNumMapTasks();
    if (null != numTasks) {
        jobConf.setNumMapTasks(numTasks);
        LOGGER.info("Set number of map tasks to {} on job conf", numTasks);
    }

    // A single reducer is used so that all sampled keys are sorted together,
    // which is needed when deriving the split points
    jobConf.setNumReduceTasks(1);
    LOGGER.info("Set number of reduce tasks to 1 on job conf");

    jobConf.set(AccumuloStoreConstants.ACCUMULO_ELEMENT_CONVERTER_CLASS,
            ((AccumuloStore) store).getKeyPackage().getKeyConverter().getClass().getName());

    return jobConf;
}
@Override
public JobConf createJobConf(final AddElementsFromHdfs operation, final String mapperGeneratorClassName, final Store store) throws IOException {
    final JobConf jobConf = new JobConf(new Configuration());

    LOGGER.info("Setting up job conf");
    jobConf.set(SCHEMA, new String(store.getSchema().toCompactJson(), CommonConstants.UTF_8));
    LOGGER.debug("Added {} {} to job conf", SCHEMA, new String(store.getSchema().toCompactJson(), CommonConstants.UTF_8));
    jobConf.set(MAPPER_GENERATOR, mapperGeneratorClassName);
    LOGGER.info("Added {} of {} to job conf", MAPPER_GENERATOR, mapperGeneratorClassName);
    jobConf.set(VALIDATE, String.valueOf(operation.isValidate()));
    LOGGER.info("Added {} option of {} to job conf", VALIDATE, operation.isValidate());

    if (null != operation.getNumMapTasks()) {
        jobConf.setNumMapTasks(operation.getNumMapTasks());
        LOGGER.info("Set number of map tasks to {} on job conf", operation.getNumMapTasks());
    }
    if (null != operation.getNumReduceTasks()) {
        jobConf.setNumReduceTasks(operation.getNumReduceTasks());
        LOGGER.info("Set number of reduce tasks to {} on job conf", operation.getNumReduceTasks());
    }

    jobConf.set(AccumuloStoreConstants.ACCUMULO_ELEMENT_CONVERTER_CLASS,
            ((AccumuloStore) store).getKeyPackage().getKeyConverter().getClass().getName());

    return jobConf;
}
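// Usage sketch (assumption, not taken from the factory above): the operation
// consumed by createJobConf is typically constructed via the AddElementsFromHdfs
// builder. The paths and the TextMapperGeneratorImpl class below are illustrative.
final AddElementsFromHdfs operation = new AddElementsFromHdfs.Builder()
        .addInputMapperPair("/data/input", TextMapperGeneratorImpl.class.getName())
        .outputPath("/data/output")
        .failurePath("/data/failure")
        .jobInitialiser(new TextJobInitialiser())
        .splitsFilePath("/data/splits")
        .workingPath("/data/working")
        .build();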
@Override
public Dataset<Row> doOperation(final GetDataFrameOfElements operation, final Context context, final Store store) throws OperationException {
    final SparkSession spark = SparkContextUtil.getSparkSession(context, store.getProperties());
    final User user = context.getUser();

    // Restrict the query to the user's data auths, if any have been set
    final Authorisations auths;
    if (user != null && user.getDataAuths() != null) {
        auths = new Authorisations(user.getDataAuths().toArray(new String[user.getDataAuths().size()]));
    } else {
        auths = new Authorisations();
    }

    final String visibility;
    if (store.getSchema().getVisibilityProperty() != null) {
        visibility = store.getSchema().getVisibilityProperty();
    } else {
        visibility = "";
    }

    return doOperation(operation, (ParquetStore) store, spark, auths, visibility);
}
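// Usage sketch (assumption): the handler above is reached by executing a
// GetDataFrameOfElements operation against a Graph backed by a ParquetStore;
// "graph" and "user" here are illustrative.
final Dataset<Row> dataFrame = graph.execute(
        new GetDataFrameOfElements.Builder().build(),
        user);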
@Override
public Void doOperation(final AddElements addElements, final Context context, final Store store) throws OperationException {
    Iterable<? extends Element> elements = addElements.getInput();
    if (addElements.isValidate()) {
        // Wrap the input so each element is validated against the store schema as it is consumed
        elements = new ValidatedElements(elements, store.getSchema(), addElements.isSkipInvalidElements());
    }
    addElements(elements, (MapStore) store);
    return null;
}
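// Usage sketch (assumption): the validate and skipInvalidElements flags read by
// the handler above are set when the operation is built; "graph", "user" and
// "elements" here are illustrative.
graph.execute(new AddElements.Builder()
        .input(elements)
        .validate(true)
        .skipInvalidElements(false)
        .build(), user);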
public <OP extends Validatable & Operation> GafferAdder(final OP operation, final Store store) {
    this.store = store;
    this.validate = operation.isValidate();
    this.skipInvalid = operation.isSkipInvalidElements();

    final String maxQueueSizeOption = operation.getOption(FlinkConstants.MAX_QUEUE_SIZE);
    this.maxQueueSize = null != maxQueueSizeOption ? Integer.parseInt(maxQueueSizeOption) : MAX_QUEUE_SIZE_DEFAULT;

    // Capture the store's state as serialisable fields so the adder can be
    // shipped to Flink workers and the store recreated there
    graphId = store.getGraphId();
    schema = store.getSchema().toCompactJson();
    properties = store.getProperties().getProperties();
}
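// Usage sketch (assumption): the queue-size option parsed in the constructor
// above comes from the operation's options map; the "10000" value and the
// operation and store instances here are illustrative.
operation.addOption(FlinkConstants.MAX_QUEUE_SIZE, "10000");
final GafferAdder adder = new GafferAdder(operation, store);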
private void updateStore(final GraphConfig config) {
    if (null == store) {
        store = Store.createStore(config.getGraphId(), cloneSchema(schema), properties);
    } else if ((null != config.getGraphId() && !config.getGraphId().equals(store.getGraphId()))
            || (null != schema)
            || (null != properties && !properties.equals(store.getProperties()))) {
        // The supplied store needs to be reinitialised as the graphId, schema or properties have changed
        if (null == config.getGraphId()) {
            config.setGraphId(store.getGraphId());
        }
        if (null == schema || schema.getGroups().isEmpty()) {
            schema = store.getSchema();
        }
        if (null == properties) {
            properties = store.getProperties();
        }

        try {
            store.initialise(config.getGraphId(), cloneSchema(schema), properties);
        } catch (final StoreException e) {
            throw new IllegalArgumentException("Unable to initialise the store with the given graphId, schema and properties", e);
        }
    }

    store.setGraphLibrary(config.getLibrary());

    if (null == schema || schema.getGroups().isEmpty()) {
        schema = store.getSchema();
    }
}
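// Usage sketch (assumption): updateStore is invoked internally during Graph
// construction; from the caller's side the equivalent entry point is the Graph
// builder. The graphId, schema source and storeProperties here are illustrative.
final Graph graph = new Graph.Builder()
        .config(new GraphConfig.Builder()
                .graphId("exampleGraph")
                .build())
        .addSchemas(StreamUtil.schemas(getClass()))
        .storeProperties(storeProperties)
        .build();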