/**
 * Lookup a custom property of the dataset.
 *
 * @param key the name of the property
 * @param defaultValue the value to return if property does not exist
 * @return the value of the property or defaultValue if the property does not exist
 */
public String getProperty(String key, String defaultValue) {
  // Fall back to the default only when the key is entirely absent; a key mapped
  // to null would still be returned by getProperty(key).
  if (!properties.containsKey(key)) {
    return defaultValue;
  }
  return getProperty(key);
}
/**
 * Returns the specifications of all datasets in the given namespace, excluding
 * workflow-local datasets.
 *
 * @param namespaceId the namespace to list dataset specifications for
 * @return all dataset specifications in the namespace that are not workflow-local
 */
public Collection<DatasetSpecification> getAll(NamespaceId namespaceId) {
  Predicate<DatasetSpecification> excludeLocalDatasets = spec -> {
    if (spec == null) {
      return false;
    }
    // Workflow-local datasets are marked with a boolean property on the spec.
    String localFlag = spec.getProperty(Constants.AppFabric.WORKFLOW_LOCAL_DATASET_PROPERTY);
    return !Boolean.parseBoolean(localFlag);
  };
  return getAll(namespaceId, excludeLocalDatasets);
}
/**
 * Lists every dataset specification in the namespace, filtering out those that
 * are local to a workflow run.
 *
 * @param namespaceId the namespace whose datasets should be listed
 * @return specifications of all non-workflow-local datasets in the namespace
 */
public Collection<DatasetSpecification> getAll(NamespaceId namespaceId) {
  // A spec passes the filter when it is non-null and its workflow-local flag
  // does not parse to true.
  Predicate<DatasetSpecification> nonLocalFilter = spec ->
    spec != null
      && !Boolean.parseBoolean(spec.getProperty(Constants.AppFabric.WORKFLOW_LOCAL_DATASET_PROPERTY));
  return getAll(namespaceId, nonLocalFilter);
}
/**
 * Instantiates the object store on top of its embedded key-value table, restoring
 * the stored object type and schema from the spec's JSON properties.
 */
@Override
public ObjectStoreDataset<?> getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                                        Map<String, String> arguments,
                                        ClassLoader classLoader) throws IOException {
  // The serialized object type and schema are persisted as JSON properties on the spec.
  TypeRepresentation storedType = GSON.fromJson(spec.getProperty("type"), TypeRepresentation.class);
  Schema storedSchema = GSON.fromJson(spec.getProperty("schema"), Schema.class);
  // Objects are stored in the embedded key-value table named "objects".
  DatasetSpecification objectsTableSpec = spec.getSpecification("objects");
  KeyValueTable backingTable =
    tableDef.getDataset(datasetContext, objectsTableSpec, arguments, classLoader);
  return new ObjectStoreDataset(spec.getName(), backingTable, storedType, storedSchema, classLoader);
} }
/**
 * Builds the {@code ObjectStoreDataset} from its specification: the backing
 * key-value table is obtained from the embedded "objects" spec, and the object
 * type and schema are deserialized from JSON properties of the spec.
 */
@Override
public ObjectStoreDataset<?> getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                                        Map<String, String> arguments,
                                        ClassLoader classLoader) throws IOException {
  DatasetSpecification embeddedTableSpec = spec.getSpecification("objects");
  KeyValueTable kvTable =
    tableDef.getDataset(datasetContext, embeddedTableSpec, arguments, classLoader);
  // "type" and "schema" properties carry the JSON-serialized object description.
  TypeRepresentation typeRepresentation =
    GSON.fromJson(spec.getProperty("type"), TypeRepresentation.class);
  Schema objectSchema = GSON.fromJson(spec.getProperty("schema"), Schema.class);
  return new ObjectStoreDataset(spec.getName(), kvTable, typeRepresentation, objectSchema,
                                classLoader);
} }
boolean shouldErrorOnMissingSchema) { String schemaStr = spec.getProperty(DatasetProperties.SCHEMA);
boolean shouldErrorOnMissingSchema) { String schemaStr = spec.getProperty(DatasetProperties.SCHEMA);
public RecordDataset(DatasetSpecification spec, KeyValueTable table) throws ClassNotFoundException { super(spec.getName(), table); this.table = table; this.recordClass = Class.forName(spec.getProperty("recordClassName")); }
@Override public DatasetSpecification reconfigure(String instanceName, DatasetProperties newProperties, DatasetSpecification currentSpec) throws IncompatibleUpdateException { // validate that the columns to index property is not null and the same as before String columnNamesToIndex = newProperties.getProperties().get(IndexedTable.INDEX_COLUMNS_CONF_KEY); if (columnNamesToIndex == null) { throw new IllegalArgumentException("columnsToIndex must be specified"); } String oldColumnsToIndex = currentSpec.getProperty(IndexedTable.INDEX_COLUMNS_CONF_KEY); if (!columnNamesToIndex.equals(oldColumnsToIndex)) { Set<byte[]> newColumns = parseColumns(columnNamesToIndex); Set<byte[]> oldColumns = parseColumns(oldColumnsToIndex); if (!newColumns.equals(oldColumns)) { throw new IncompatibleUpdateException(String.format("Attempt to change columns to index from '%s' to '%s'", oldColumnsToIndex, columnNamesToIndex)); } } return super.reconfigure(instanceName, newProperties, currentSpec); }
/**
 * Creates the metadata dataset on top of its embedded indexed table. Specs written
 * before the scope property existed carry no scope and default to USER.
 */
@Override
public MetadataDataset getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                                  Map<String, String> arguments,
                                  ClassLoader classLoader) throws IOException {
  String scopeName = spec.getProperty(SCOPE_KEY);
  // Missing scope property -> USER scope (backward compatibility with older specs).
  MetadataScope metadataScope =
    scopeName == null ? MetadataScope.USER : MetadataScope.valueOf(scopeName);
  return new MetadataDataset(
    indexedTableDef.getDataset(datasetContext, spec.getSpecification(METADATA_INDEX_TABLE_NAME),
                               arguments, classLoader),
    metadataScope);
} }
/**
 * Instantiates the {@code MetadataDataset}, wrapping the embedded metadata index
 * table and resolving the metadata scope from the spec's scope property.
 */
@Override
public MetadataDataset getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                                  Map<String, String> arguments,
                                  ClassLoader classLoader) throws IOException {
  String storedScope = spec.getProperty(SCOPE_KEY);
  DatasetSpecification indexTableSpec = spec.getSpecification(METADATA_INDEX_TABLE_NAME);
  // Older specs have no scope property; treat those as USER scope.
  return new MetadataDataset(
    indexedTableDef.getDataset(datasetContext, indexTableSpec, arguments, classLoader),
    storedScope == null ? MetadataScope.USER : MetadataScope.valueOf(storedScope));
} }
/**
 * Creates a record writer over the dataset obtained from the given accessor.
 * When the dataset's record type is {@code StructuredRecord}, the dataset's schema
 * property is looked up and parsed eagerly so that schema problems surface here
 * rather than during writes.
 *
 * @param datasetAccessor provides the dataset, its specification, and its id
 * @throws RuntimeException if the schema lookup or parse fails; the acquired
 *   dataset is closed (best-effort) before propagating the original cause
 */
DatasetRecordWriter(DatasetAccessor datasetAccessor) {
  this.datasetAccessor = datasetAccessor;
  this.recordWritable = datasetAccessor.getDataset();
  this.recordType = recordWritable.getRecordType();
  if (recordType == StructuredRecord.class) {
    try {
      DatasetSpecification datasetSpec = datasetAccessor.getDatasetSpec();
      String schemaStr = datasetSpec.getProperty(DatasetProperties.SCHEMA);
      // should never happen, as this should have been checked at table creation
      if (schemaStr == null) {
        throw new IllegalStateException(
          String.format("Dataset '%s' does not have the schema property.", datasetSpec.getName()));
      }
      recordSchema = Schema.parseJson(schemaStr);
    } catch (IOException | DatasetManagementException e) {
      // Don't leak the already-acquired dataset: close it best-effort, log any
      // close failure, and propagate the original lookup/parse failure as cause.
      try {
        recordWritable.close();
      } catch (IOException e1) {
        LOG.warn("Exception closing dataset {} after failing to look up its schema.",
                 datasetAccessor.getDatasetId(), e1);
      }
      throw new RuntimeException("Unable to look up schema for dataset.", e);
    }
  }
}
/**
 * Constructs a writer for the dataset supplied by the accessor. For datasets whose
 * record type is {@code StructuredRecord}, the schema stored in the dataset
 * specification is parsed up-front, failing fast if it is missing or invalid.
 *
 * @param datasetAccessor source of the dataset, its specification, and its id
 * @throws RuntimeException wrapping the underlying failure when the schema cannot
 *   be looked up or parsed; the dataset is closed (best-effort) first
 */
DatasetRecordWriter(DatasetAccessor datasetAccessor) {
  this.datasetAccessor = datasetAccessor;
  this.recordWritable = datasetAccessor.getDataset();
  this.recordType = recordWritable.getRecordType();
  if (recordType == StructuredRecord.class) {
    try {
      DatasetSpecification datasetSpec = datasetAccessor.getDatasetSpec();
      String schemaStr = datasetSpec.getProperty(DatasetProperties.SCHEMA);
      // should never happen, as this should have been checked at table creation
      if (schemaStr == null) {
        throw new IllegalStateException(
          String.format("Dataset '%s' does not have the schema property.", datasetSpec.getName()));
      }
      recordSchema = Schema.parseJson(schemaStr);
    } catch (IOException | DatasetManagementException e) {
      // Close the dataset we already acquired so it is not leaked; a failure to
      // close is only logged, and the original exception remains the cause.
      try {
        recordWritable.close();
      } catch (IOException e1) {
        LOG.warn("Exception closing dataset {} after failing to look up its schema.",
                 datasetAccessor.getDatasetId(), e1);
      }
      throw new RuntimeException("Unable to look up schema for dataset.", e);
    }
  }
}
/**
 * Rejects any attempt to change the 'immutable' property across a reconfiguration;
 * all other property changes are delegated to the superclass.
 *
 * @throws IncompatibleUpdateException if the 'immutable' property would change
 */
@Override
public DatasetSpecification reconfigure(String instanceName, DatasetProperties newProperties,
                                        DatasetSpecification currentSpec)
  throws IncompatibleUpdateException {
  String oldValue = currentSpec.getProperty("immutable");
  String updatedValue = newProperties.getProperties().get("immutable");
  // Objects.equals handles the case where either side is absent (null).
  if (Objects.equals(oldValue, updatedValue)) {
    return super.reconfigure(instanceName, newProperties, currentSpec);
  }
  throw new IncompatibleUpdateException(String.format("Cannot change property 'immutable' from %s to %s",
                                                      oldValue, updatedValue));
}
/**
 * Assembles the {@code IndexedTable} from its two embedded tables and the
 * columns-to-index property stored on the spec.
 */
@Override
public IndexedTable getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                               Map<String, String> arguments,
                               ClassLoader classLoader) throws IOException {
  String columnsProperty = spec.getProperty(IndexedTable.INDEX_COLUMNS_CONF_KEY);
  SortedSet<byte[]> indexedColumns = parseColumns(columnsProperty);
  // "d" is the data table, "i" is the index table.
  Table dataTable = getDataset(datasetContext, "d", spec, arguments, classLoader);
  Table indexTable = getDataset(datasetContext, "i", spec, arguments, classLoader);
  return new IndexedTable(spec.getName(), dataTable, indexTable, indexedColumns);
}
DatasetProperties.Builder newFileProperties = DatasetProperties.builder() .addAll(properties.getProperties()); String useNameAsBasePathDefault = currentSpec.getProperty(NAME_AS_BASE_PATH_DEFAULT); if (Boolean.parseBoolean(useNameAsBasePathDefault) && !properties.getProperties().containsKey(FileSetProperties.BASE_PATH)) {
/**
 * Creates the {@code ObjectMappedTableDataset} over its embedded table, restoring
 * the object type and schema from the spec's properties. For specs written by
 * CDAP 2.8 (whose embedded table spec lacks a schema), the table spec is rebuilt
 * with the schema and row-key field added so that explore integration works.
 */
@Override
public ObjectMappedTableDataset<?> getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                                              Map<String, String> arguments,
                                              ClassLoader classLoader) throws IOException {
  String keyName = ObjectMappedTableProperties.getRowKeyExploreName(spec.getProperties());
  DatasetSpecification tableSpec = spec.getSpecification(TABLE_NAME);
  // if the table spec did not have schema, this is an ObjectMappedTable from CDAP 2.8.
  // add the schema and row key as arguments so that explore will work
  // TODO: remove after CDAP-2122 is done
  if (!tableSpec.getProperties().containsKey(Table.PROPERTY_SCHEMA)) {
    // Rebuild the embedded table spec, copying its existing properties and nested
    // specs, and injecting the schema and row field from the outer spec.
    tableSpec = DatasetSpecification.builder(tableSpec.getName(), tableSpec.getType())
      .properties(tableSpec.getProperties())
      .property(Table.PROPERTY_SCHEMA, spec.getProperty(Table.PROPERTY_SCHEMA))
      .property(Table.PROPERTY_SCHEMA_ROW_FIELD, keyName)
      .datasets(tableSpec.getSpecifications().values())
      .build();
  }
  // reconstruct the table schema here because of backwards compatibility
  DatasetDefinition<Table, DatasetAdmin> tableDef = getDelegate(TABLE_NAME);
  Table table = tableDef.getDataset(datasetContext, tableSpec, arguments, classLoader);
  Map<String, String> properties = spec.getProperties();
  // Object type and schema are deserialized from the outer spec's properties.
  TypeRepresentation typeRep = GSON.fromJson(
    ObjectMappedTableProperties.getObjectTypeRepresentation(properties), TypeRepresentation.class);
  Schema objSchema = ObjectMappedTableProperties.getObjectSchema(properties);
  return new ObjectMappedTableDataset(spec.getName(), table, typeRep, objSchema, classLoader);
}
/**
 * Instantiates the {@code ObjectMappedTableDataset} from its specification. The
 * backing table comes from the embedded table spec; the mapped object's type and
 * schema are read from the outer spec's properties. Specs from CDAP 2.8, whose
 * embedded table spec predates the schema property, have the schema and row-key
 * field patched in so explore continues to work.
 */
@Override
public ObjectMappedTableDataset<?> getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                                              Map<String, String> arguments,
                                              ClassLoader classLoader) throws IOException {
  String keyName = ObjectMappedTableProperties.getRowKeyExploreName(spec.getProperties());
  DatasetSpecification tableSpec = spec.getSpecification(TABLE_NAME);
  // if the table spec did not have schema, this is an ObjectMappedTable from CDAP 2.8.
  // add the schema and row key as arguments so that explore will work
  // TODO: remove after CDAP-2122 is done
  if (!tableSpec.getProperties().containsKey(Table.PROPERTY_SCHEMA)) {
    // Rebuild the table spec in place: same name/type/properties/nested specs,
    // plus the schema and row field taken from the outer spec.
    tableSpec = DatasetSpecification.builder(tableSpec.getName(), tableSpec.getType())
      .properties(tableSpec.getProperties())
      .property(Table.PROPERTY_SCHEMA, spec.getProperty(Table.PROPERTY_SCHEMA))
      .property(Table.PROPERTY_SCHEMA_ROW_FIELD, keyName)
      .datasets(tableSpec.getSpecifications().values())
      .build();
  }
  // reconstruct the table schema here because of backwards compatibility
  DatasetDefinition<Table, DatasetAdmin> tableDef = getDelegate(TABLE_NAME);
  Table table = tableDef.getDataset(datasetContext, tableSpec, arguments, classLoader);
  Map<String, String> properties = spec.getProperties();
  TypeRepresentation typeRep = GSON.fromJson(
    ObjectMappedTableProperties.getObjectTypeRepresentation(properties), TypeRepresentation.class);
  Schema objSchema = ObjectMappedTableProperties.getObjectSchema(properties);
  return new ObjectMappedTableDataset(spec.getName(), table, typeRep, objSchema, classLoader);
}
/**
 * Verifies that a specification with a single property reports its name, type,
 * and property map correctly, and embeds no nested dataset specifications.
 */
@Test
public void testSimpleSpec() {
  String specName = "name";
  String specType = "type";
  String key = "prop1";
  String value = "val1";
  DatasetSpecification spec = DatasetSpecification.builder(specName, specType)
    .property(key, value)
    .build();
  Assert.assertEquals(specName, spec.getName());
  Assert.assertEquals(specType, spec.getType());
  Assert.assertEquals(value, spec.getProperty(key));
  Assert.assertEquals(1, spec.getProperties().size());
  Assert.assertTrue(spec.getSpecifications().isEmpty());
}
Assert.assertEquals("12345678", met2.getSpec().getProperty("ttl"));