/**
 * Returns the admin for this dataset by delegating to the embedded "objects" table,
 * the only underlying dataset that needs administration.
 */
@Override
public DatasetAdmin getAdmin(DatasetContext datasetContext, DatasetSpecification spec,
                             ClassLoader classLoader) throws IOException {
  DatasetSpecification objectsSpec = spec.getSpecification("objects");
  return tableDef.getAdmin(datasetContext, objectsSpec, classLoader);
}
/**
 * Delegates administration to the embedded "objects" table; there is no other
 * underlying dataset to administer.
 */
@Override
public DatasetAdmin getAdmin(DatasetContext datasetContext, DatasetSpecification spec,
                             ClassLoader classLoader) throws IOException {
  DatasetSpecification embeddedSpec = spec.getSpecification("objects");
  return tableDef.getAdmin(datasetContext, embeddedSpec, classLoader);
}
/**
 * Returns the admin of the embedded metadata index table, which backs all storage
 * for this dataset.
 */
@Override
public DatasetAdmin getAdmin(DatasetContext datasetContext, DatasetSpecification spec,
                             ClassLoader classLoader) throws IOException {
  DatasetSpecification indexTableSpec = spec.getSpecification(METADATA_INDEX_TABLE_NAME);
  return indexedTableDef.getAdmin(datasetContext, indexTableSpec, classLoader);
}
/**
 * Administration is handled entirely by the embedded metadata index table, so its
 * admin is returned directly.
 */
@Override
public DatasetAdmin getAdmin(DatasetContext datasetContext, DatasetSpecification spec,
                             ClassLoader classLoader) throws IOException {
  DatasetSpecification embedded = spec.getSpecification(METADATA_INDEX_TABLE_NAME);
  return indexedTableDef.getAdmin(datasetContext, embedded, classLoader);
}
/**
 * Returns the admin for the single embedded "objects" table that backs this dataset.
 */
@Override
public DatasetAdmin getAdmin(DatasetContext datasetContext, DatasetSpecification spec,
                             ClassLoader classLoader) throws IOException {
  DatasetSpecification backingSpec = spec.getSpecification("objects");
  return tableDef.getAdmin(datasetContext, backingSpec, classLoader);
}
/**
 * Builds a composite admin for the partitioned file set: one admin for the embedded
 * file set and one for the partition index table, keyed by their embedded-dataset names.
 */
@Override
public DatasetAdmin getAdmin(DatasetContext datasetContext, DatasetSpecification spec,
                             ClassLoader classLoader) throws IOException {
  // Resolve the admins of both embedded datasets up front for readability.
  DatasetAdmin filesAdmin =
      filesetDef.getAdmin(datasetContext, spec.getSpecification(FILESET_NAME), classLoader);
  DatasetAdmin partitionsAdmin =
      indexedTableDef.getAdmin(datasetContext, spec.getSpecification(PARTITION_TABLE_NAME),
                               classLoader);
  return new PartitionedFileSetAdmin(
      datasetContext, spec, getExploreProvider(),
      ImmutableMap.<String, DatasetAdmin>of(
          FILESET_NAME, filesAdmin,
          PARTITION_TABLE_NAME, partitionsAdmin));
}
/**
 * Creates the admin for this partitioned file set as a composite of the embedded
 * file set admin and the partition table admin.
 */
@Override
public DatasetAdmin getAdmin(DatasetContext datasetContext, DatasetSpecification spec,
                             ClassLoader classLoader) throws IOException {
  // Admin of the embedded file set.
  DatasetAdmin filesetAdmin =
      filesetDef.getAdmin(datasetContext, spec.getSpecification(FILESET_NAME), classLoader);
  // Admin of the indexed table that holds partition metadata.
  DatasetAdmin partitionTableAdmin =
      indexedTableDef.getAdmin(datasetContext, spec.getSpecification(PARTITION_TABLE_NAME),
                               classLoader);
  return new PartitionedFileSetAdmin(
      datasetContext, spec, getExploreProvider(),
      ImmutableMap.<String, DatasetAdmin>of(
          FILESET_NAME, filesetAdmin,
          PARTITION_TABLE_NAME, partitionTableAdmin));
}
/**
 * Instantiates the consumer state store for the queue named in the runtime arguments,
 * backed by the embedded state-store table.
 */
@Override
public HBaseConsumerStateStore getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                                          Map<String, String> arguments, ClassLoader classLoader)
  throws IOException {
  // NOTE(review): a missing PROPERTY_QUEUE_NAME argument would surface as an NPE inside
  // URI.create — presumably callers always supply it; confirm against call sites.
  QueueName queueName = QueueName.from(URI.create(arguments.get(PROPERTY_QUEUE_NAME)));
  Table table = tableDefinition.getDataset(
      datasetContext, spec.getSpecification(STATE_STORE_EMBEDDED_TABLE_NAME), arguments,
      classLoader);
  return new HBaseConsumerStateStore(spec.getName(), queueName, table);
}
}
/**
 * Instantiates a {@link FakeDataset} wrapping the embedded "objects" key-value table.
 */
@Override
public FakeDataset getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                              Map<String, String> arguments, ClassLoader classLoader)
  throws IOException {
  DatasetSpecification objectsSpec = spec.getSpecification("objects");
  KeyValueTable backingTable =
      tableDef.getDataset(datasetContext, objectsSpec, arguments, classLoader);
  return new FakeDataset(spec.getName(), backingTable);
}
}
/**
 * Returns the admin of the embedded "kv" table, which is the only dataset this
 * definition needs to administer.
 */
@Override
public DatasetAdmin getAdmin(DatasetContext datasetContext, DatasetSpecification spec,
                             ClassLoader classLoader) throws IOException {
  DatasetSpecification kvSpec = spec.getSpecification("kv");
  return tableDef.getAdmin(datasetContext, kvSpec, classLoader);
}
/**
 * Delegates administration to the embedded "table" dataset.
 */
@Override
public DatasetAdmin getAdmin(DatasetContext datasetContext, DatasetSpecification spec,
                             ClassLoader classLoader) throws IOException {
  DatasetSpecification tableSpec = spec.getSpecification("table");
  return tableDef.getAdmin(datasetContext, tableSpec, classLoader);
}
@Override public PartitionedFileSet getDataset(DatasetContext datasetContext, DatasetSpecification spec, Map<String, String> arguments, ClassLoader classLoader) throws IOException { // make any necessary updates to the arguments arguments = updateArgumentsIfNeeded(arguments); FileSet fileset = filesetDef.getDataset(datasetContext, spec.getSpecification(FILESET_NAME), arguments, classLoader); IndexedTable table = indexedTableDef.getDataset(datasetContext, spec.getSpecification(PARTITION_TABLE_NAME), arguments, classLoader); return new TimePartitionedFileSetDataset(datasetContext, spec.getName(), fileset, table, spec, arguments, getExploreProvider()); }
/**
 * Instantiates an object store over the embedded "objects" key-value table.
 * The stored object's type and schema are deserialized from the dataset properties
 * that were written at configure time.
 */
@Override
public ObjectStoreDataset<?> getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                                        Map<String, String> arguments, ClassLoader classLoader)
  throws IOException {
  DatasetSpecification kvTableSpec = spec.getSpecification("objects");
  KeyValueTable table = tableDef.getDataset(datasetContext, kvTableSpec, arguments, classLoader);
  TypeRepresentation typeRep = GSON.fromJson(spec.getProperty("type"), TypeRepresentation.class);
  Schema schema = GSON.fromJson(spec.getProperty("schema"), Schema.class);
  // Use the diamond operator instead of a raw ObjectStoreDataset to avoid an
  // unchecked-conversion warning; behavior is unchanged.
  return new ObjectStoreDataset<>(spec.getName(), table, typeRep, schema, classLoader);
}
}
@Override public PartitionedFileSet getDataset(DatasetContext datasetContext, DatasetSpecification spec, Map<String, String> arguments, ClassLoader classLoader) throws IOException { // make any necessary updates to the arguments arguments = updateArgumentsIfNeeded(arguments); FileSet fileset = filesetDef.getDataset(datasetContext, spec.getSpecification(FILESET_NAME), arguments, classLoader); IndexedTable table = indexedTableDef.getDataset(datasetContext, spec.getSpecification(PARTITION_TABLE_NAME), arguments, classLoader); return new TimePartitionedFileSetDataset(datasetContext, spec.getName(), fileset, table, spec, arguments, getExploreProvider()); }
/**
 * Creates the object store dataset backed by the embedded "objects" key-value table,
 * reconstructing the stored type and schema from the spec's properties.
 */
@Override
public ObjectStoreDataset<?> getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                                        Map<String, String> arguments, ClassLoader classLoader)
  throws IOException {
  DatasetSpecification kvTableSpec = spec.getSpecification("objects");
  KeyValueTable table = tableDef.getDataset(datasetContext, kvTableSpec, arguments, classLoader);
  TypeRepresentation typeRep = GSON.fromJson(spec.getProperty("type"), TypeRepresentation.class);
  Schema schema = GSON.fromJson(spec.getProperty("schema"), Schema.class);
  // Diamond operator replaces the raw ObjectStoreDataset instantiation, eliminating
  // the unchecked-conversion warning without changing behavior.
  return new ObjectStoreDataset<>(spec.getName(), table, typeRep, schema, classLoader);
}
}
/**
 * Instantiates the metadata dataset over its embedded index table. The scope is read
 * from the spec's properties; if absent, it defaults to {@code MetadataScope.USER}.
 */
@Override
public MetadataDataset getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                                  Map<String, String> arguments, ClassLoader classLoader)
  throws IOException {
  String scopeProperty = spec.getProperty(SCOPE_KEY);
  return new MetadataDataset(
      indexedTableDef.getDataset(datasetContext,
                                 spec.getSpecification(METADATA_INDEX_TABLE_NAME),
                                 arguments, classLoader),
      scopeProperty == null ? MetadataScope.USER : MetadataScope.valueOf(scopeProperty));
}
}
/**
 * Creates the metadata dataset from the embedded index table, defaulting the scope
 * to {@code MetadataScope.USER} when the scope property is not set.
 */
@Override
public MetadataDataset getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                                  Map<String, String> arguments, ClassLoader classLoader)
  throws IOException {
  String scopeValue = spec.getProperty(SCOPE_KEY);
  return new MetadataDataset(
      indexedTableDef.getDataset(datasetContext,
                                 spec.getSpecification(METADATA_INDEX_TABLE_NAME),
                                 arguments, classLoader),
      scopeValue == null ? MetadataScope.USER : MetadataScope.valueOf(scopeValue));
}
}
/**
 * Instantiates the test dataset over its embedded "kv" table. Note that the embedded
 * table is created with {@code DatasetDefinition.NO_ARGUMENTS} — the runtime
 * {@code arguments} are passed only to the {@link TestDataset} itself.
 */
@Override
public TestDataset getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                              Map<String, String> arguments, ClassLoader classLoader)
  throws IOException {
  DatasetSpecification kvTableSpec = spec.getSpecification("kv");
  KeyValueTable kvTable = tableDef.getDataset(
      datasetContext, kvTableSpec, DatasetDefinition.NO_ARGUMENTS, classLoader);
  return new TestDataset(spec, kvTable, arguments);
}
}
@Override public DatasetSpecification reconfigure(String instanceName, DatasetProperties newProperties, DatasetSpecification currentSpec) throws IncompatibleUpdateException { // TODO (CDAP-6268): validate schema compatibility return DatasetSpecification.builder(instanceName, getName()) .properties(newProperties.getProperties()) .datasets(AbstractDatasetDefinition.reconfigure(tableDef, "objects", checkAndRemoveSchema(newProperties), currentSpec.getSpecification("objects"))) .build(); }
@Override public DatasetSpecification reconfigure(String instanceName, DatasetProperties newProperties, DatasetSpecification currentSpec) throws IncompatibleUpdateException { // TODO (CDAP-6268): validate schema compatibility return DatasetSpecification.builder(instanceName, getName()) .properties(newProperties.getProperties()) .datasets(AbstractDatasetDefinition.reconfigure(tableDef, "objects", checkAndRemoveSchema(newProperties), currentSpec.getSpecification("objects"))) .build(); }