@Override
public DatasetSpecification configure(String instanceName, DatasetProperties properties) {
  // Configure every embedded dataset under its delegate key, then compose the
  // resulting child specifications into the spec for this composite dataset.
  List<DatasetSpecification> childSpecs = new java.util.ArrayList<>(this.delegates.size());
  this.delegates.forEach((delegateName, delegateDef) ->
      childSpecs.add(delegateDef.configure(delegateName, properties)));
  return DatasetSpecification.builder(instanceName, getName())
      .properties(properties.getProperties())
      .datasets(childSpecs)
      .build();
}
@Override public DatasetSpecification configure(String instanceName, DatasetProperties properties) { DatasetProperties factTableProperties = computeFactTableProperties(properties); List<DatasetSpecification> datasetSpecs = Lists.newArrayList(); // Configuring table that hold mappings of tag names and values and such datasetSpecs.add(metricsTableDef.configure("entity", properties)); // NOTE: we create a table per resolution; we later will use that to e.g. configure ttl separately for each for (int resolution : getResolutions(properties.getProperties())) { datasetSpecs.add(tableDef.configure(String.valueOf(resolution), factTableProperties)); } return DatasetSpecification.builder(instanceName, getName()) .properties(properties.getProperties()) .datasets(datasetSpecs) .build(); }
public DatasetSpecification configure(String instanceName, DatasetProperties properties) {
  // Delegate the actual configuration, then record the caller-supplied
  // properties (and optional description) on the resulting spec.
  DatasetSpecification configured =
      delegate.configure(instanceName, properties).setOriginalProperties(properties);
  String description = properties.getDescription();
  return description == null ? configured : configured.setDescription(description);
}
@Override
public DatasetSpecification configure(String instanceName, DatasetProperties properties) {
  // For each metadata index column, index both its cross-namespace and
  // its regular column name in the embedded indexed table.
  String[] allIndexColumns = MetadataDataset.INDEX_COLUMNS.stream()
      .flatMap(col -> Stream.of(col.getCrossNamespaceColumn(), col.getColumn()))
      .toArray(String[]::new);

  DatasetSpecification indexTableSpec = indexedTableDef.configure(
      METADATA_INDEX_TABLE_NAME, addIndexColumns(properties, allIndexColumns));

  return DatasetSpecification.builder(instanceName, getName())
      .properties(properties.getProperties())
      .datasets(indexTableSpec)
      .build();
}
public DatasetSpecification configure(String instanceName, DatasetProperties properties) {
  // Let the delegate build the spec, then attach the original properties;
  // carry the description over only when one was provided.
  DatasetSpecification result = delegate.configure(instanceName, properties);
  result = result.setOriginalProperties(properties);
  String desc = properties.getDescription();
  if (desc != null) {
    result = result.setDescription(desc);
  }
  return result;
}
@Override
public DatasetSpecification configure(String instanceName, DatasetProperties properties) {
  // Single embedded table named "objects", configured with the caller's properties.
  DatasetSpecification objectsSpec = tableDef.configure("objects", properties);
  return DatasetSpecification.builder(instanceName, getName())
      .properties(properties.getProperties())
      .datasets(objectsSpec)
      .build();
}
@Override
public DatasetSpecification configure(String name, DatasetProperties properties) {
  // The state store is backed by one embedded table under a fixed name.
  DatasetSpecification embeddedTableSpec =
      tableDefinition.configure(STATE_STORE_EMBEDDED_TABLE_NAME, properties);
  return DatasetSpecification.builder(name, getName())
      .properties(properties.getProperties())
      .datasets(embeddedTableSpec)
      .build();
}
@Override
public DatasetSpecification configure(String instanceName, DatasetProperties properties) {
  // Builds this dataset's spec around a single embedded "objects" table.
  return DatasetSpecification.builder(instanceName, getName())
      .datasets(tableDef.configure("objects", properties))
      .properties(properties.getProperties())
      .build();
}
@Override
public DatasetSpecification configure(String instanceName, DatasetProperties properties) {
  // Embedded table uses column-level conflict detection on top of the caller's properties.
  TableProperties.Builder tableProps = TableProperties.builder();
  tableProps.addAll(properties.getProperties());
  tableProps.setConflictDetection(ConflictDetection.COLUMN);

  // The partition count is mandatory; fail fast with a clear message when missing.
  // BUGFIX: the template previously used the SLF4J-style '{}' placeholder, but Guava's
  // Preconditions substitutes '%s' — with '{}' the property name was never interpolated.
  Preconditions.checkNotNull(
      properties.getProperties().get(Constants.Scheduler.JOB_QUEUE_NUM_PARTITIONS),
      "Property '%s' must be given when creating job queue dataset",
      Constants.Scheduler.JOB_QUEUE_NUM_PARTITIONS);

  DatasetSpecification tableSpec = super.getDelegate(JobQueueDataset.EMBEDDED_TABLE_NAME)
      .configure(JobQueueDataset.EMBEDDED_TABLE_NAME, tableProps.build());

  return DatasetSpecification.builder(instanceName, getName())
      .properties(properties.getProperties())
      .datasets(Collections.singletonList(tableSpec))
      .build();
}
@Override
public DatasetSpecification configure(String instanceName, DatasetProperties properties) {
  // Validate and strip the schema before handing the properties to the
  // embedded "objects" table; the outer spec keeps the full property map.
  DatasetProperties tableProps = checkAndRemoveSchema(properties);
  return DatasetSpecification.builder(instanceName, getName())
      .properties(properties.getProperties())
      .datasets(tableDef.configure("objects", tableProps))
      .build();
}
@Override
public DatasetSpecification configure(String instanceName, DatasetProperties properties) {
  // The embedded "objects" table receives the properties with the schema
  // validated and removed; the outer spec records the original properties.
  DatasetSpecification objectsTable =
      tableDef.configure("objects", checkAndRemoveSchema(properties));
  return DatasetSpecification.builder(instanceName, getName())
      .properties(properties.getProperties())
      .datasets(objectsTable)
      .build();
}
// Builds a spec for the given type via TestModule2's definition, stamping the
// original properties and the supplied description onto the result.
private static DatasetSpecification createType2Spec(String instanceName, String typeName,
                                                    String description,
                                                    DatasetProperties properties) {
  DatasetSpecification spec =
      new TestModule2().createDefinition(typeName).configure(instanceName, properties);
  return spec.setOriginalProperties(properties).setDescription(description);
}
@Override
public DatasetSpecification configure(String name, DatasetProperties properties) {
  // Embedded indexed table: caller properties plus the schedule-store index
  // columns, with column-level conflict detection.
  TableProperties.Builder embeddedProps = TableProperties.builder();
  embeddedProps.addAll(properties.getProperties());
  embeddedProps.add(IndexedTable.INDEX_COLUMNS_CONF_KEY, ProgramScheduleStoreDataset.INDEX_COLUMNS);
  embeddedProps.setConflictDetection(ConflictDetection.COLUMN);

  DatasetSpecification embeddedSpec = getDelegate(ProgramScheduleStoreDataset.EMBEDDED_TABLE_NAME)
      .configure(ProgramScheduleStoreDataset.EMBEDDED_TABLE_NAME, embeddedProps.build());

  // NOTE(review): unlike sibling definitions, this spec does not call
  // .properties(properties.getProperties()) — confirm that is intentional.
  return DatasetSpecification.builder(name, getName())
      .datasets(embeddedSpec)
      .build();
}
@Override public DatasetSpecification configure(String instanceName, DatasetProperties properties) { // Use ConflictDetection.NONE as we only need a flag whether a program uses a dataset/stream. // Having conflict detection will lead to failures when programs start, and all try to register at the same time. DatasetProperties datasetProperties = TableProperties.builder() .setConflictDetection(ConflictDetection.NONE) .addAll(properties.getProperties()) .build(); DatasetSpecification spec = tableDefinition.configure(instanceName, datasetProperties); return DatasetSpecification.builder(instanceName, getName()) .properties(spec.getProperties()) .build(); }
@Override public DatasetSpecification configure(String instanceName, DatasetProperties properties) { // Use ConflictDetection.NONE as we only need a flag whether a program uses a dataset/stream. // Having conflict detection will lead to failures when programs start, and all try to register at the same time. DatasetProperties datasetProperties = TableProperties.builder() .setConflictDetection(ConflictDetection.NONE) .addAll(properties.getProperties()) .build(); DatasetSpecification spec = tableDefinition.configure(instanceName, datasetProperties); return DatasetSpecification.builder(instanceName, getName()) .properties(spec.getProperties()) .build(); }
@Override
public DatasetSpecification configure(String name, DatasetProperties properties) {
  // Build the embedded table's properties: caller-supplied entries, the
  // schedule-store index columns, and column-level conflict detection.
  TableProperties.Builder indexTableProps = TableProperties.builder();
  indexTableProps.addAll(properties.getProperties());
  indexTableProps.add(IndexedTable.INDEX_COLUMNS_CONF_KEY, ProgramScheduleStoreDataset.INDEX_COLUMNS);
  indexTableProps.setConflictDetection(ConflictDetection.COLUMN);

  DatasetSpecification indexTableSpec =
      getDelegate(ProgramScheduleStoreDataset.EMBEDDED_TABLE_NAME)
          .configure(ProgramScheduleStoreDataset.EMBEDDED_TABLE_NAME, indexTableProps.build());

  // NOTE(review): the outer spec intentionally(?) omits
  // .properties(properties.getProperties()) — verify against sibling definitions.
  return DatasetSpecification.builder(name, getName()).datasets(indexTableSpec).build();
}
@Override
public DatasetSpecification configure(String instanceName, DatasetProperties properties) {
  // One embedded table, fixed name "table", sharing the caller's properties.
  DatasetSpecification innerTable = tableDef.configure("table", properties);
  return DatasetSpecification.builder(instanceName, getName())
      .properties(properties.getProperties())
      .datasets(innerTable)
      .build();
}
@Override
public DatasetSpecification configure(String instanceName, DatasetProperties properties) {
  // Resolve the record schema from the given properties and pin it on the
  // spec; the embedded "kv" store is configured with empty properties.
  String resolvedSchema = getSchemaString(properties);
  DatasetSpecification kvSpec = getDelegate("kv").configure("kv", DatasetProperties.EMPTY);
  return DatasetSpecification.builder(instanceName, RecordDataset.class.getName())
      .properties(properties.getProperties())
      .property("schema", resolvedSchema)
      .datasets(kvSpec)
      .build();
}
@Override
public DatasetSpecification configure(String instanceName, DatasetProperties properties) {
  // Wraps a single embedded "kv" table configured with the same properties.
  DatasetSpecification kvTable = tableDef.configure("kv", properties);
  return DatasetSpecification.builder(instanceName, getName())
      .properties(properties.getProperties())
      .datasets(kvTable)
      .build();
}
// Verifies that fixOriginalProperties restores the properties a dataset
// instance was originally configured with, for the given dataset type.
private void testFix(String type, DatasetProperties props) {
  DatasetDefinition def = DatasetFrameworkTestUtil.getDatasetDefinition(
      inMemoryDatasetFramework, NamespaceId.DEFAULT, type);
  Assert.assertNotNull(def);

  // Configure a throwaway instance, apply the fix-up, and check that the
  // original properties round-trip exactly.
  DatasetSpecification configuredSpec = def.configure("nn", props);
  Map<String, String> restored =
      DatasetsUtil.fixOriginalProperties(configuredSpec).getOriginalProperties();
  Assert.assertEquals(props.getProperties(), restored);
}
}