@Override public ObjectMappedTableDataset<?> getDataset(DatasetContext datasetContext, DatasetSpecification spec, Map<String, String> arguments, ClassLoader classLoader) throws IOException { String keyName = ObjectMappedTableProperties.getRowKeyExploreName(spec.getProperties()); DatasetSpecification tableSpec = spec.getSpecification(TABLE_NAME); // if the table spec did not have schema, this is an ObjectMappedTable from CDAP 2.8. // add the schema and row key as arguments so that explore will work // TODO: remove after CDAP-2122 is done if (!tableSpec.getProperties().containsKey(Table.PROPERTY_SCHEMA)) { tableSpec = DatasetSpecification.builder(tableSpec.getName(), tableSpec.getType()) .properties(tableSpec.getProperties()) .property(Table.PROPERTY_SCHEMA, spec.getProperty(Table.PROPERTY_SCHEMA)) .property(Table.PROPERTY_SCHEMA_ROW_FIELD, keyName) .datasets(tableSpec.getSpecifications().values()) .build(); } // reconstruct the table schema here because of backwards compatibility DatasetDefinition<Table, DatasetAdmin> tableDef = getDelegate(TABLE_NAME); Table table = tableDef.getDataset(datasetContext, tableSpec, arguments, classLoader); Map<String, String> properties = spec.getProperties(); TypeRepresentation typeRep = GSON.fromJson( ObjectMappedTableProperties.getObjectTypeRepresentation(properties), TypeRepresentation.class); Schema objSchema = ObjectMappedTableProperties.getObjectSchema(properties); return new ObjectMappedTableDataset(spec.getName(), table, typeRep, objSchema, classLoader); }
/**
 * Returns true when the given dataset name refers to this specification (for a leaf spec)
 * or to one of its transitively embedded datasets.
 */
private boolean isParent(String datasetName, DatasetSpecification specification) {
  if (datasetName == null) {
    return false;
  }
  boolean isLeaf = specification.getSpecifications().size() == 0;
  if (isLeaf && specification.getName().equals(datasetName)) {
    return true;
  }
  if (!datasetName.startsWith(specification.getName())) {
    return false;
  }
  // the name is prefixed by ours: it can only match one of the embedded specs
  return specification.getSpecifications().values().stream()
    .anyMatch(child -> isParent(datasetName, child));
}
// NOTE(review): this span is a truncated fragment — the enclosing method signature and several
// closing braces are missing from this view (the unused local 'type' also suggests cut code),
// so the text below is not compilable as-is. Kept byte-identical; verify against the full file.
// Visible intent: if the spec is null or already carries original properties, return it
// unchanged; otherwise strip PARTITIONING_FIELD_PREFIX-keyed entries from a sorted copy of the
// properties and record that copy as the spec's original properties.
if (spec == null || spec.getOriginalProperties() != null) { return spec; Map<String, String> props = new TreeMap<>(spec.getProperties()); if (!props.isEmpty()) { String type = spec.getType(); for (String key : spec.getProperties().keySet()) { if (key.startsWith(PartitionedFileSetProperties.PARTITIONING_FIELD_PREFIX)) { props.remove(key); return spec.setOriginalProperties(props);
/**
 * Constructs the dataset over the given table, deriving the row-partition interval size
 * from the specification's properties. (The old comment claiming it "only sets the name"
 * was inaccurate: it also initializes the interval size and the table reference.)
 */
TimeseriesDataset(DatasetSpecification spec, Table table) {
  super(spec.getName(), table);
  // interval size is configured via dataset properties, not hard-coded
  this.rowPartitionIntervalSize = getIntervalSize(spec.getProperties());
  this.table = table;
}
/**
 * Instantiates the object store over its embedded "objects" key-value table, deserializing
 * the stored type representation and schema from the spec's properties.
 */
@Override
public ObjectStoreDataset<?> getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                                        Map<String, String> arguments,
                                        ClassLoader classLoader) throws IOException {
  DatasetSpecification kvTableSpec = spec.getSpecification("objects");
  KeyValueTable table = tableDef.getDataset(datasetContext, kvTableSpec, arguments, classLoader);
  TypeRepresentation typeRep = GSON.fromJson(spec.getProperty("type"), TypeRepresentation.class);
  Schema schema = GSON.fromJson(spec.getProperty("schema"), Schema.class);
  // diamond operator instead of the original raw-type constructor call (unchecked warning)
  return new ObjectStoreDataset<>(spec.getName(), table, typeRep, schema, classLoader);
}
}
@Test
public void testNamespacing() {
  // build a parent spec with one embedded dataset
  DatasetSpecification embedded = DatasetSpecification.builder("inner", "table")
    .build();
  DatasetSpecification parent = DatasetSpecification.builder("kv", "kvtable")
    .datasets(embedded)
    .build();

  Assert.assertEquals("kv", parent.getName());
  Assert.assertEquals("kvtable", parent.getType());

  // the embedded spec's name must be namespaced with the parent's name
  DatasetSpecification namespaced = parent.getSpecification("inner");
  Assert.assertEquals("kv.inner", namespaced.getName());
  Assert.assertEquals("table", namespaced.getType());
  Assert.assertTrue(namespaced.getSpecifications().isEmpty());
}
/**
 * Creates the admin for an in-memory table; only the context, instance name, and
 * CDAP configuration are needed.
 */
@Override
public InMemoryTableAdmin getAdmin(DatasetContext datasetContext, DatasetSpecification spec,
                                   ClassLoader classLoader) throws IOException {
  String instanceName = spec.getName();
  return new InMemoryTableAdmin(datasetContext, instanceName, cConf);
}
}
static Collection<DatasetSpecificationSummary> spec2Summary(Collection<DatasetSpecification> specs) { List<DatasetSpecificationSummary> datasetSummaries = Lists.newArrayList(); for (DatasetSpecification spec : specs) { // TODO: (CDAP-3097) handle system datasets specially within a namespace instead of filtering them out // by the handler. This filter is only in the list endpoint because the other endpoints are used by // HBaseQueueAdmin through DatasetFramework. spec = DatasetsUtil.fixOriginalProperties(spec); datasetSummaries.add(new DatasetSpecificationSummary(spec.getName(), spec.getType(), spec.getDescription(), spec.getOriginalProperties())); } return datasetSummaries; }
@Override public Collection<DatasetSpecificationSummary> getInstances(NamespaceId namespaceId, Map<String, String> properties) { readLock.lock(); try { // don't expect this to be called a lot. // might be better to maintain this collection separately and just return it, but seems like its not worth it. Collection<DatasetSpecification> specs = instances.row(namespaceId).values(); ImmutableList.Builder<DatasetSpecificationSummary> specSummaries = ImmutableList.builder(); for (DatasetSpecification spec : specs) { if (properties.isEmpty() || Maps.difference(properties, spec.getProperties()).entriesOnlyOnLeft().isEmpty()) { specSummaries.add(new DatasetSpecificationSummary(spec.getName(), spec.getType(), spec.getProperties())); } } return specSummaries.build(); } finally { readLock.unlock(); } }
/**
 * Creates the dataset over the given key-value table, loading the record class named
 * by the spec's "recordClassName" property.
 *
 * @throws ClassNotFoundException if the configured record class cannot be loaded
 */
public RecordDataset(DatasetSpecification spec, KeyValueTable table) throws ClassNotFoundException {
  super(spec.getName(), table);
  this.table = table;
  // NOTE(review): Class.forName uses this class's defining classloader; if the record class
  // lives in a user artifact a specific classloader may be required — verify against callers.
  this.recordClass = Class.forName(spec.getProperty("recordClassName"));
}
/**
 * Instantiates the fake dataset over its embedded "objects" key-value table.
 */
@Override
public FakeDataset getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                              Map<String, String> arguments,
                              ClassLoader classLoader) throws IOException {
  DatasetSpecification embeddedSpec = spec.getSpecification("objects");
  KeyValueTable backingTable = tableDef.getDataset(datasetContext, embeddedSpec, arguments, classLoader);
  return new FakeDataset(spec.getName(), backingTable);
}
}
/**
 * Builds the coprocessor jar, enabling the readless-increment and transaction
 * coprocessors according to the dataset's properties.
 */
@Override
protected CoprocessorJar createCoprocessorJar() throws IOException {
  boolean incrementSupport = TableProperties.getReadlessIncrementSupport(spec.getProperties());
  boolean txAware = DatasetsUtil.isTransactional(spec.getProperties());
  return createCoprocessorJarInternal(conf, coprocessorManager, tableUtil, txAware, incrementSupport);
}
/**
 * Configures every delegate under its own key and embeds the resulting specs into
 * the specification of this composite dataset.
 */
@Override
public DatasetSpecification configure(String instanceName, DatasetProperties properties) {
  List<DatasetSpecification> delegateSpecs = this.delegates.entrySet().stream()
    .map(entry -> entry.getValue().configure(entry.getKey(), properties))
    .collect(Collectors.toList());
  return DatasetSpecification.builder(instanceName, getName())
    .properties(properties.getProperties())
    .datasets(delegateSpecs)
    .build();
}
/**
 * Rebuilds the spec with the new properties, reconfiguring the embedded "objects" table
 * against its current spec so incompatible table changes are rejected.
 *
 * @throws IncompatibleUpdateException if the embedded table cannot accept the update
 */
@Override
public DatasetSpecification reconfigure(String instanceName, DatasetProperties newProperties,
                                        DatasetSpecification currentSpec) throws IncompatibleUpdateException {
  // TODO (CDAP-6268): validate schema compatibility
  return DatasetSpecification.builder(instanceName, getName())
    .properties(newProperties.getProperties())
    // checkAndRemoveSchema strips the schema before handing properties to the table delegate
    .datasets(AbstractDatasetDefinition.reconfigure(tableDef, "objects",
                                                    checkAndRemoveSchema(newProperties),
                                                    currentSpec.getSpecification("objects")))
    .build();
}
/**
 * Drops every dataset instance in the namespace (publishing a DELETE audit event for
 * each) and then clears the namespace's bookkeeping row.
 *
 * @throws DatasetManagementException if an instance's type is not registered anywhere
 */
@Override
public void deleteAllInstances(NamespaceId namespaceId) throws DatasetManagementException, IOException {
  writeLock.lock();
  try {
    for (DatasetSpecification instanceSpec : instances.row(namespaceId).values()) {
      DatasetDefinition definition = getDefinitionForType(namespaceId, instanceSpec.getType());
      if (definition == null) {
        throw new DatasetManagementException(String.format(
          "Dataset type '%s' is neither registered in the '%s' namespace nor in the system namespace",
          instanceSpec.getType(), namespaceId));
      }
      definition.getAdmin(DatasetContext.from(namespaceId.getEntityName()), instanceSpec, null).drop();
      publishAudit(namespaceId.dataset(instanceSpec.getName()), AuditType.DELETE);
    }
    instances.row(namespaceId).clear();
  } finally {
    writeLock.unlock();
  }
}
/**
 * Instantiates the metadata dataset over its embedded index table, defaulting to the
 * USER scope when no scope property was configured.
 */
@Override
public MetadataDataset getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                                  Map<String, String> arguments,
                                  ClassLoader classLoader) throws IOException {
  String scopeProperty = spec.getProperty(SCOPE_KEY);
  MetadataScope scope = scopeProperty == null ? MetadataScope.USER : MetadataScope.valueOf(scopeProperty);
  return new MetadataDataset(
    indexedTableDef.getDataset(datasetContext, spec.getSpecification(METADATA_INDEX_TABLE_NAME),
                               arguments, classLoader),
    scope);
}
}
/**
 * Delegates configuration, then records the original properties (and, when present,
 * the description) on the resulting specification.
 */
public DatasetSpecification configure(String instanceName, DatasetProperties properties) {
  DatasetSpecification configured = delegate.configure(instanceName, properties)
    .setOriginalProperties(properties);
  String description = properties.getDescription();
  return description == null ? configured : configured.setDescription(description);
}
@Test public void testPermissions() throws Exception { // validate that the fileset permissions and group were applied to the embedded fileset (just sanity test) PartitionedFileSet pfs = dsFrameworkUtil.getInstance(pfsInstance); Location loc = pfs.getEmbeddedFileSet().getLocation("some/random/path"); loc.getOutputStream().close(); Assert.assertEquals(fsPermissions, loc.getPermissions()); Assert.assertEquals(group, loc.getGroup()); Map<String, String> props = dsFrameworkUtil.getSpec(pfsInstance).getSpecification("partitions").getProperties(); Assert.assertEquals(tablePermissions, TableProperties.getTablePermissions(props)); }
/**
 * Admin operations are handled entirely by the embedded metadata index table.
 */
@Override
public DatasetAdmin getAdmin(DatasetContext datasetContext, DatasetSpecification spec,
                             ClassLoader classLoader) throws IOException {
  DatasetSpecification indexTableSpec = spec.getSpecification(METADATA_INDEX_TABLE_NAME);
  return indexedTableDef.getAdmin(datasetContext, indexTableSpec, classLoader);
}
@Override public void update(DatasetSpecification oldSpec) throws IOException { // update all existing resolution tables, create all new resolutions for (Map.Entry<String, DatasetSpecification> entry : spec.getSpecifications().entrySet()) { DatasetSpecification oldSubSpec = spec.getSpecification(entry.getKey()); DatasetAdmin subAdmin = delegates.get(entry.getKey()); if (oldSubSpec != null && subAdmin instanceof Updatable) { ((Updatable) subAdmin).update(oldSubSpec); } else if (oldSubSpec == null) { subAdmin.create(); } } // TODO (CDAP-6342) delete all resolutions that were removed as part of the update } }