/**
 * Reconfigure a dataset instance. If the dataset definition implements {@link Reconfigurable},
 * the update is delegated to {@link Reconfigurable#reconfigure} so the definition can validate
 * the new properties against the current specification; otherwise the instance is simply
 * re-configured from scratch via {@link #configure}.
 *
 * @param def the dataset definition that will perform the (re)configure
 * @param name name of the dataset instance to reconfigure
 * @param newProps the updated dataset properties
 * @param currentSpec the current dataset specification
 * @return a new dataset specification representing the updated properties
 * @throws IncompatibleUpdateException if the updated properties are incompatible with the existing properties
 */
public static DatasetSpecification reconfigure(DatasetDefinition def, String name, DatasetProperties newProps,
                                               DatasetSpecification currentSpec)
  throws IncompatibleUpdateException {
  if (def instanceof Reconfigurable) {
    // The definition knows how to validate/merge an update against the existing spec.
    return ((Reconfigurable) def).reconfigure(name, newProps, currentSpec);
  }
  // Non-reconfigurable definitions are configured fresh from the new properties.
  return def.configure(name, newProps);
}
}
/**
 * Gets a dataset instance for the given spec and runtime arguments, supplying the
 * class loader that was captured when this wrapper was constructed.
 */
public D getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                    Map<String, String> arguments) throws IOException {
  // Forward to the full-signature variant on the delegate.
  D dataset = delegate.getDataset(datasetContext, spec, arguments, classLoader);
  return dataset;
}
}
/**
 * Returns the admin for this dataset. Administration is handled entirely by the
 * underlying table definition.
 */
@Override
public DatasetAdmin getAdmin(DatasetContext datasetContext, DatasetSpecification spec,
                             ClassLoader classLoader) throws IOException {
  DatasetAdmin tableAdmin = tableDefinition.getAdmin(datasetContext, spec, classLoader);
  return tableAdmin;
}
// Smoke test for a no-transaction key/value table definition: configure an instance,
// create it through its admin, then exercise basic get/put behavior.
@Test
public void test() throws IOException {
  DatasetDefinition<? extends NoTxKeyValueTable, ? extends DatasetAdmin> def = getDefinition();
  // Configure an instance named "table" with default (empty) properties.
  DatasetSpecification spec = def.configure("table", DatasetProperties.EMPTY);
  DatasetAdmin admin = def.getAdmin(datasetContext, spec, cl);
  // The instance must not exist before create() is called.
  Assert.assertFalse(admin.exists());
  admin.create();
  NoTxKeyValueTable table = def.getDataset(datasetContext, spec, NO_ARGS, cl);
  // A freshly created table holds no value for KEY1 until one is put.
  Assert.assertNull(table.get(KEY1));
  table.put(KEY1, VALUE1);
  // NOTE(review): this method body appears truncated in this chunk -- the remaining
  // assertions and the closing brace are not visible here.
/**
 * Creates a new dataset instance of the given type. Fails if an instance with the same id
 * already exists or the type is not registered. This implementation does not support
 * dataset ownership, so a non-null owner principal is rejected outright.
 */
@Override
public void addInstance(String datasetType, DatasetId datasetInstanceId, DatasetProperties props,
                        @Nullable KerberosPrincipalId ownerPrincipal)
  throws DatasetManagementException, IOException {
  // Ownership is unsupported here; reject before taking the lock.
  if (ownerPrincipal != null) {
    throw new UnsupportedOperationException("Creating dataset with owner is not supported");
  }
  // Write lock guards the check-then-put on 'instances' so concurrent creates cannot race.
  writeLock.lock();
  try {
    if (instances.contains(datasetInstanceId.getParent(), datasetInstanceId)) {
      throw new InstanceConflictException(String.format("Dataset instance '%s' already exists.",
                                                        datasetInstanceId));
    }
    // Type lookup falls back to the system namespace (see error message below).
    DatasetDefinition def = getDefinitionForType(datasetInstanceId.getParent(), datasetType);
    if (def == null) {
      throw new DatasetManagementException(
        String.format("Dataset type '%s' is neither registered in the '%s' namespace nor in the system namespace",
                      datasetType, datasetInstanceId.getParent()));
    }
    DatasetSpecification spec = def.configure(datasetInstanceId.getEntityName(), props);
    // Remember the properties as supplied by the caller, before the definition's transformations.
    spec = spec.setOriginalProperties(props);
    if (props.getDescription() != null) {
      spec = spec.setDescription(props.getDescription());
    }
    // Create physical storage first; only record the instance once creation succeeded.
    // NOTE(review): a null class loader is passed to getAdmin -- presumably the definition's
    // own loader is used in that case; confirm against the definition implementations.
    def.getAdmin(DatasetContext.from(datasetInstanceId.getNamespace()), spec, null).create();
    instances.put(datasetInstanceId.getParent(), datasetInstanceId, spec);
    publishAudit(datasetInstanceId, AuditType.CREATE);
    LOG.info("Created dataset {} of type {}", datasetInstanceId, datasetType);
  } finally {
    writeLock.unlock();
  }
}
/**
 * Registers a dataset definition with the wrapped registry and records its type name.
 */
@Override
public void add(DatasetDefinition def) {
  // Let the delegate register first; if it throws, our name bookkeeping stays untouched.
  delegate.add(def);
  String typeName = def.getName();
  types.add(typeName);
}
/**
 * Creates a new dataset instance of the given type. Fails if an instance with the same id
 * already exists or the type is unknown. Dataset ownership is not supported by this
 * implementation, so a non-null owner principal is rejected.
 */
@Override
public void addInstance(String datasetType, DatasetId datasetInstanceId, DatasetProperties props,
                        @Nullable KerberosPrincipalId ownerPrincipal)
  throws DatasetManagementException, IOException {
  if (ownerPrincipal != null) {
    throw new UnsupportedOperationException("Creating dataset with owner is not supported");
  }
  // Guard the check-then-put on 'instances' against concurrent creates.
  writeLock.lock();
  try {
    if (instances.contains(datasetInstanceId.getParent(), datasetInstanceId)) {
      throw new InstanceConflictException(
        String.format("Dataset instance '%s' already exists.", datasetInstanceId));
    }
    DatasetDefinition definition = getDefinitionForType(datasetInstanceId.getParent(), datasetType);
    if (definition == null) {
      throw new DatasetManagementException(
        String.format("Dataset type '%s' is neither registered in the '%s' namespace nor in the system namespace",
                      datasetType, datasetInstanceId.getParent()));
    }
    // Build the spec, remembering the caller-supplied properties verbatim.
    DatasetSpecification specification =
      definition.configure(datasetInstanceId.getEntityName(), props).setOriginalProperties(props);
    String description = props.getDescription();
    if (description != null) {
      specification = specification.setDescription(description);
    }
    // Create physical storage before recording the instance.
    definition.getAdmin(DatasetContext.from(datasetInstanceId.getNamespace()), specification, null).create();
    instances.put(datasetInstanceId.getParent(), datasetInstanceId, specification);
    publishAudit(datasetInstanceId, AuditType.CREATE);
    LOG.info("Created dataset {} of type {}", datasetInstanceId, datasetType);
  } finally {
    writeLock.unlock();
  }
}
/**
 * Registers a dataset definition under its type name, rejecting duplicates.
 */
@Override
public void add(DatasetDefinition def) {
  final String typeName = def.getName();
  if (!datasetTypes.containsKey(typeName)) {
    datasetTypes.put(typeName, def);
    return;
  }
  // A type may only be registered once.
  throw new TypeConflictException("Cannot add dataset type: it already exists: " + typeName);
}
/**
 * Builds the specification for this composite dataset: each embedded delegate is
 * configured under its own key with the shared properties, and the resulting specs
 * are attached to the composite's specification.
 */
@Override
public DatasetSpecification configure(String instanceName, DatasetProperties properties) {
  // Configure every embedded dataset, named after its delegate key.
  List<DatasetSpecification> embeddedSpecs = this.delegates.entrySet().stream()
    .map(entry -> entry.getValue().configure(entry.getKey(), properties))
    .collect(Collectors.toList());
  DatasetSpecification.Builder builder = DatasetSpecification.builder(instanceName, getName());
  builder.properties(properties.getProperties());
  builder.datasets(embeddedSpecs);
  return builder.build();
}
/**
 * Gets a dataset instance for the given spec and runtime arguments. The class loader
 * captured at construction time is passed through to the delegate.
 */
public D getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                    Map<String, String> arguments) throws IOException {
  D instance = delegate.getDataset(datasetContext, spec, arguments, classLoader);
  return instance;
}
}
/**
 * Returns the admin for this dataset; all administrative operations are delegated
 * to the underlying table definition.
 */
@Override
public DatasetAdmin getAdmin(DatasetContext datasetContext, DatasetSpecification spec,
                             ClassLoader classLoader) throws IOException {
  DatasetAdmin delegateAdmin = tableDefinition.getAdmin(datasetContext, spec, classLoader);
  return delegateAdmin;
}
/**
 * Adds a dataset definition to the wrapped registry and tracks its type name.
 */
@Override
public void add(DatasetDefinition def) {
  // Register via the delegate first so a failure there leaves 'types' unchanged.
  delegate.add(def);
  String registeredName = def.getName();
  types.add(registeredName);
}
@Override public DatasetSpecification configure(String instanceName, DatasetProperties properties) { DatasetProperties factTableProperties = computeFactTableProperties(properties); List<DatasetSpecification> datasetSpecs = Lists.newArrayList(); // Configuring table that hold mappings of tag names and values and such datasetSpecs.add(metricsTableDef.configure("entity", properties)); // NOTE: we create a table per resolution; we later will use that to e.g. configure ttl separately for each for (int resolution : getResolutions(properties.getProperties())) { datasetSpecs.add(tableDef.configure(String.valueOf(resolution), factTableProperties)); } return DatasetSpecification.builder(instanceName, getName()) .properties(properties.getProperties()) .datasets(datasetSpecs) .build(); }
/**
 * Gets a {@link UsageDataset} instance by wrapping the underlying Table obtained
 * from the table definition.
 */
@Override
public UsageDataset getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                               Map<String, String> arguments, ClassLoader classLoader) throws IOException {
  return new UsageDataset(tableDefinition.getDataset(datasetContext, spec, arguments, classLoader));
}
}
/**
 * Returns the admin for the given spec, supplying the class loader bound at construction.
 */
public A getAdmin(DatasetContext datasetContext, DatasetSpecification spec) throws IOException {
  A admin = delegate.getAdmin(datasetContext, spec, classLoader);
  return admin;
}
/**
 * Registers a dataset definition under its type name; a second registration of the
 * same type name is a conflict.
 */
@Override
public void add(DatasetDefinition def) {
  final String name = def.getName();
  if (!datasetTypes.containsKey(name)) {
    datasetTypes.put(name, def);
    return;
  }
  throw new TypeConflictException("Cannot add dataset type: it already exists: " + name);
}
@Override public DatasetSpecification configure(String instanceName, DatasetProperties properties) { DatasetProperties factTableProperties = computeFactTableProperties(properties); List<DatasetSpecification> datasetSpecs = Lists.newArrayList(); // Configuring table that hold mappings of tag names and values and such datasetSpecs.add(metricsTableDef.configure("entity", properties)); // NOTE: we create a table per resolution; we later will use that to e.g. configure ttl separately for each for (int resolution : getResolutions(properties.getProperties())) { datasetSpecs.add(tableDef.configure(String.valueOf(resolution), factTableProperties)); } return DatasetSpecification.builder(instanceName, getName()) .properties(properties.getProperties()) .datasets(datasetSpecs) .build(); }
/**
 * Gets a {@link UsageDataset} backed by the Table produced by the table definition.
 */
@Override
public UsageDataset getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                               Map<String, String> arguments, ClassLoader classLoader) throws IOException {
  return new UsageDataset(tableDefinition.getDataset(datasetContext, spec, arguments, classLoader));
}
}
/**
 * Returns the admin for the given spec, passing along the class loader this wrapper
 * was constructed with.
 */
public A getAdmin(DatasetContext datasetContext, DatasetSpecification spec) throws IOException {
  A delegateAdmin = delegate.getAdmin(datasetContext, spec, classLoader);
  return delegateAdmin;
}
/**
 * Registers a dataset type definition, detecting conflicts with types already stored
 * in the type metadata store. Redefinition is allowed only for module upgrades, or
 * for forced (conflict-tolerant) updates of non-system types.
 */
@Override
public void add(DatasetDefinition def) {
  String typeName = def.getName();
  DatasetTypeId typeId = getNamespaceId().datasetType(typeName);
  DatasetTypeMeta existingType = datasetTypeMDS.getType(typeId);
  if (existingType != null) {
    // The module that most recently defined this type is the last entry in its module list.
    DatasetModuleMeta existingModule = existingType.getModules().get(existingType.getModules().size() - 1);
    // we allow redefining an existing type if
    // - it was previously defined by the same module (i.e., this is an upgrade of that module)
    // - it is a forced update and the existing type is not a system type
    // NOTE(review): the "system type" guard compares the existing module's *name* to the system
    // namespace name -- confirm this is intended rather than comparing the module's namespace.
    if (!moduleBeingAdded.getEntityName().equals(existingModule.getName())
        && (!tolerateConflicts || NamespaceId.SYSTEM.getNamespace().equals(existingModule.getName()))) {
      throw new TypeConflictException(String.format(
        "Attempt to add dataset module '%s' containing dataset type '%s' that already exists in module '%s'",
        moduleBeingAdded.getEntityName(), typeName, existingModule.getName()));
    }
  }
  // Record the type locally and register the definition with the in-memory registry.
  types.add(typeName);
  registry.add(def);
}