/**
 * Instantiates the dataset described by {@code spec} by forwarding to the wrapped
 * delegate, supplying the class loader captured by this wrapper.
 *
 * @throws IOException if the delegate fails to create the dataset instance
 */
public D getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                    Map<String, String> arguments) throws IOException {
  D dataset = delegate.getDataset(datasetContext, spec, arguments, classLoader);
  return dataset;
}
}
/**
 * Creates the dataset for the given spec and runtime arguments. The call is
 * delegated as-is, adding only the class loader held by this object.
 *
 * @throws IOException if dataset creation fails in the delegate
 */
public D getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                    Map<String, String> arguments) throws IOException {
  // Single point of delegation; this wrapper only contributes the class loader.
  return delegate.getDataset(datasetContext, spec, arguments, classLoader);
}
}
/**
 * Creates a {@link UsageDataset} backed by the table produced by the underlying
 * table definition for the same spec and arguments.
 *
 * @throws IOException if the underlying table cannot be created
 */
@Override
public UsageDataset getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                               Map<String, String> arguments, ClassLoader classLoader)
  throws IOException {
  // Materialize the underlying table, then wrap it in the usage-tracking facade.
  Table underlying = tableDefinition.getDataset(datasetContext, spec, arguments, classLoader);
  return new UsageDataset(underlying);
}
}
/**
 * Instantiates the usage dataset: the backing {@link Table} is obtained from the
 * embedded table definition and handed to the {@link UsageDataset} wrapper.
 *
 * @throws IOException if the backing table cannot be instantiated
 */
@Override
public UsageDataset getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                               Map<String, String> arguments, ClassLoader classLoader)
  throws IOException {
  return new UsageDataset(
    tableDefinition.getDataset(datasetContext, spec, arguments, classLoader));
}
}
/**
 * Creates the consumer state store for a queue. The queue name is taken from the
 * runtime arguments and the state is persisted in an embedded table.
 *
 * @throws IOException if the embedded state table cannot be created
 */
@Override
public HBaseConsumerStateStore getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                                          Map<String, String> arguments, ClassLoader classLoader)
  throws IOException {
  // The queue this state store serves is passed through the runtime arguments.
  // NOTE(review): assumes PROPERTY_QUEUE_NAME is always present — a missing key
  // would surface as a NullPointerException from URI.create; confirm with callers.
  QueueName queueName = QueueName.from(URI.create(arguments.get(PROPERTY_QUEUE_NAME)));
  DatasetSpecification stateTableSpec = spec.getSpecification(STATE_STORE_EMBEDDED_TABLE_NAME);
  Table stateTable = tableDefinition.getDataset(datasetContext, stateTableSpec, arguments, classLoader);
  return new HBaseConsumerStateStore(spec.getName(), queueName, stateTable);
}
}
/**
 * Creates a {@link FakeDataset} on top of the embedded key/value table that was
 * configured under the name {@code "objects"}.
 *
 * @throws IOException if the embedded table cannot be created
 */
@Override
public FakeDataset getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                              Map<String, String> arguments, ClassLoader classLoader)
  throws IOException {
  // Look up the embedded table's spec, then materialize it with the same arguments.
  DatasetSpecification embeddedSpec = spec.getSpecification("objects");
  KeyValueTable kvTable = tableDef.getDataset(datasetContext, embeddedSpec, arguments, classLoader);
  return new FakeDataset(spec.getName(), kvTable);
}
}
/**
 * Creates a {@link StandaloneDataset} backed by the embedded key/value table
 * registered under the name {@code "objects"}.
 *
 * @throws IOException if the embedded table cannot be created
 */
@Override
public StandaloneDataset getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                                    Map<String, String> arguments, ClassLoader classLoader)
  throws IOException {
  DatasetSpecification embeddedSpec = spec.getSpecification("objects");
  KeyValueTable kvTable = tableDef.getDataset(datasetContext, embeddedSpec, arguments, classLoader);
  return new StandaloneDataset(spec.getName(), kvTable);
}
}
@Override public PartitionedFileSet getDataset(DatasetContext datasetContext, DatasetSpecification spec, Map<String, String> arguments, ClassLoader classLoader) throws IOException { // make any necessary updates to the arguments arguments = updateArgumentsIfNeeded(arguments); FileSet fileset = filesetDef.getDataset(datasetContext, spec.getSpecification(FILESET_NAME), arguments, classLoader); IndexedTable table = indexedTableDef.getDataset(datasetContext, spec.getSpecification(PARTITION_TABLE_NAME), arguments, classLoader); return new TimePartitionedFileSetDataset(datasetContext, spec.getName(), fileset, table, spec, arguments, getExploreProvider()); }
/**
 * Creates an object store on top of the embedded key/value table named
 * {@code "objects"}. The object's type and schema were serialized into the
 * spec's properties at configure time and are deserialized here.
 *
 * @throws IOException if the embedded table cannot be created
 */
@Override
public ObjectStoreDataset<?> getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                                        Map<String, String> arguments, ClassLoader classLoader)
  throws IOException {
  DatasetSpecification kvTableSpec = spec.getSpecification("objects");
  KeyValueTable table = tableDef.getDataset(datasetContext, kvTableSpec, arguments, classLoader);
  TypeRepresentation typeRep = GSON.fromJson(spec.getProperty("type"), TypeRepresentation.class);
  Schema schema = GSON.fromJson(spec.getProperty("schema"), Schema.class);
  // Diamond operator instead of a raw ObjectStoreDataset: avoids the unchecked
  // conversion warning; the element type is only known at runtime anyway.
  return new ObjectStoreDataset<>(spec.getName(), table, typeRep, schema, classLoader);
}
}
@Override public PartitionedFileSet getDataset(DatasetContext datasetContext, DatasetSpecification spec, Map<String, String> arguments, ClassLoader classLoader) throws IOException { // make any necessary updates to the arguments arguments = updateArgumentsIfNeeded(arguments); FileSet fileset = filesetDef.getDataset(datasetContext, spec.getSpecification(FILESET_NAME), arguments, classLoader); IndexedTable table = indexedTableDef.getDataset(datasetContext, spec.getSpecification(PARTITION_TABLE_NAME), arguments, classLoader); return new TimePartitionedFileSetDataset(datasetContext, spec.getName(), fileset, table, spec, arguments, getExploreProvider()); }
/**
 * Creates an object store backed by the embedded {@code "objects"} key/value
 * table, restoring the stored type and schema from the spec's properties.
 *
 * @throws IOException if the embedded table cannot be created
 */
@Override
public ObjectStoreDataset<?> getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                                        Map<String, String> arguments, ClassLoader classLoader)
  throws IOException {
  DatasetSpecification kvTableSpec = spec.getSpecification("objects");
  KeyValueTable table = tableDef.getDataset(datasetContext, kvTableSpec, arguments, classLoader);
  TypeRepresentation typeRep = GSON.fromJson(spec.getProperty("type"), TypeRepresentation.class);
  Schema schema = GSON.fromJson(spec.getProperty("schema"), Schema.class);
  // Use the diamond operator rather than a raw ObjectStoreDataset constructor
  // call, eliminating the unchecked-conversion warning.
  return new ObjectStoreDataset<>(spec.getName(), table, typeRep, schema, classLoader);
}
}
/**
 * Creates the cube dataset: an entity table plus one data table per configured
 * time resolution, together with the aggregations declared in the properties.
 *
 * @throws IOException if any embedded table cannot be created
 */
@Override
public CubeDataset getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                              Map<String, String> arguments, ClassLoader classLoader)
  throws IOException {
  MetricsTable entityTable = metricsTableDef.getDataset(
    datasetContext, spec.getSpecification("entity"), arguments, classLoader);
  // One embedded table per resolution; each is registered under the resolution's
  // string value in the spec.
  Map<Integer, Table> tablesByResolution = Maps.newHashMap();
  for (int res : getResolutions(spec.getProperties())) {
    Table resolutionTable = tableDef.getDataset(
      datasetContext, spec.getSpecification(String.valueOf(res)), arguments, classLoader);
    tablesByResolution.put(res, resolutionTable);
  }
  Map<String, Aggregation> aggregations = getAggregations(spec.getProperties());
  return new CubeDataset(spec.getName(), entityTable, tablesByResolution, aggregations);
}
/**
 * Assembles a cube dataset out of its embedded tables: the "entity" metrics
 * table and a resolution-keyed map of data tables.
 *
 * @throws IOException if an embedded table cannot be created
 */
@Override
public CubeDataset getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                              Map<String, String> arguments, ClassLoader classLoader)
  throws IOException {
  MetricsTable entities = metricsTableDef.getDataset(
    datasetContext, spec.getSpecification("entity"), arguments, classLoader);
  int[] configuredResolutions = getResolutions(spec.getProperties());
  Map<Integer, Table> resolutionToTable = Maps.newHashMap();
  // Each resolution's table was embedded under the resolution's decimal string.
  for (int seconds : configuredResolutions) {
    resolutionToTable.put(seconds, tableDef.getDataset(
      datasetContext, spec.getSpecification(String.valueOf(seconds)), arguments, classLoader));
  }
  return new CubeDataset(spec.getName(), entities, resolutionToTable,
                         getAggregations(spec.getProperties()));
}
/**
 * Creates the metadata dataset on top of its embedded indexed table. Specs that
 * predate the scope property fall back to the USER scope.
 *
 * @throws IOException if the embedded indexed table cannot be created
 */
@Override
public MetadataDataset getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                                  Map<String, String> arguments, ClassLoader classLoader)
  throws IOException {
  IndexedTable indexedTable = indexedTableDef.getDataset(
    datasetContext, spec.getSpecification(METADATA_INDEX_TABLE_NAME), arguments, classLoader);
  String scopeName = spec.getProperty(SCOPE_KEY);
  MetadataScope scope;
  if (scopeName == null) {
    // Older specs carry no scope property; treat them as user-scoped metadata.
    scope = MetadataScope.USER;
  } else {
    scope = MetadataScope.valueOf(scopeName);
  }
  return new MetadataDataset(indexedTable, scope);
}
}
/**
 * Instantiates the metadata dataset, resolving the metadata scope from the
 * spec's properties (defaulting to USER when absent).
 *
 * @throws IOException if the embedded indexed table cannot be created
 */
@Override
public MetadataDataset getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                                  Map<String, String> arguments, ClassLoader classLoader)
  throws IOException {
  String scopeName = spec.getProperty(SCOPE_KEY);
  // A null scope property means the spec was written before scopes existed.
  MetadataScope scope = scopeName == null ? MetadataScope.USER : MetadataScope.valueOf(scopeName);
  IndexedTable index = indexedTableDef.getDataset(
    datasetContext, spec.getSpecification(METADATA_INDEX_TABLE_NAME), arguments, classLoader);
  return new MetadataDataset(index, scope);
}
}
@Override public PartitionedFileSet getDataset(DatasetContext datasetContext, DatasetSpecification spec, Map<String, String> arguments, ClassLoader classLoader) throws IOException { // properties must contain the partitioning Partitioning partitioning = PartitionedFileSetProperties.getPartitioning(spec.getProperties()); // make any necessary updates to the arguments arguments = updateArgumentsIfNeeded(arguments, partitioning); FileSet fileset = filesetDef.getDataset(datasetContext, spec.getSpecification(FILESET_NAME), arguments, classLoader); IndexedTable table = indexedTableDef.getDataset(datasetContext, spec.getSpecification(PARTITION_TABLE_NAME), arguments, classLoader); return new PartitionedFileSetDataset(datasetContext, spec.getName(), partitioning, fileset, table, spec, arguments, getExploreProvider()); }
@Override public PartitionedFileSet getDataset(DatasetContext datasetContext, DatasetSpecification spec, Map<String, String> arguments, ClassLoader classLoader) throws IOException { // properties must contain the partitioning Partitioning partitioning = PartitionedFileSetProperties.getPartitioning(spec.getProperties()); // make any necessary updates to the arguments arguments = updateArgumentsIfNeeded(arguments, partitioning); FileSet fileset = filesetDef.getDataset(datasetContext, spec.getSpecification(FILESET_NAME), arguments, classLoader); IndexedTable table = indexedTableDef.getDataset(datasetContext, spec.getSpecification(PARTITION_TABLE_NAME), arguments, classLoader); return new PartitionedFileSetDataset(datasetContext, spec.getName(), partitioning, fileset, table, spec, arguments, getExploreProvider()); }
/**
 * Creates the test dataset. The embedded key/value table is always opened
 * without runtime arguments; the runtime arguments are handed to the
 * {@link TestDataset} itself.
 *
 * @throws IOException if the embedded table cannot be created
 */
@Override
public TestDataset getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                              Map<String, String> arguments, ClassLoader classLoader)
  throws IOException {
  DatasetSpecification embeddedSpec = spec.getSpecification("kv");
  KeyValueTable kvTable = tableDef.getDataset(
    datasetContext, embeddedSpec, DatasetDefinition.NO_ARGUMENTS, classLoader);
  return new TestDataset(spec, kvTable, arguments);
}
}
/**
 * Creates the record dataset on top of the embedded key/value table obtained
 * through the delegate registered under {@code "kv"}.
 *
 * @throws IOException if the embedded table cannot be created
 */
@Override
public RecordDataset getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                                Map<String, String> arguments, ClassLoader classLoader)
  throws IOException {
  // Fixed local name typo (was kvTbleDef).
  DatasetDefinition<KeyValueTable, DatasetAdmin> kvTableDef = getDelegate("kv");
  KeyValueTable kvTable = kvTableDef.getDataset(
    datasetContext, spec.getSpecification("kv"), arguments, classLoader);
  try {
    return new RecordDataset(spec, kvTable);
  } catch (ClassNotFoundException e) {
    // Explicit wrap instead of the deprecated Throwables.propagate(e); for a
    // checked ClassNotFoundException the resulting RuntimeException is identical.
    throw new RuntimeException(e);
  }
}
}
/**
 * Creates a {@link PrefixedTable} over the embedded key/value table named
 * {@code "table"}. The embedded table is opened without runtime arguments; the
 * arguments are passed to the {@link PrefixedTable} wrapper instead.
 *
 * @throws IOException if the embedded table cannot be created
 */
@Override
public PrefixedTable getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                                Map<String, String> arguments, ClassLoader classLoader)
  throws IOException {
  DatasetSpecification embeddedSpec = spec.getSpecification("table");
  KeyValueTable kvTable = tableDef.getDataset(
    datasetContext, embeddedSpec, DatasetDefinition.NO_ARGUMENTS, classLoader);
  return new PrefixedTable(spec.getName(), kvTable, arguments);
}
}