/**
 * Reconfigures an existing dataset instance through the wrapped definition.
 * The caller-supplied properties are recorded on the resulting spec as the
 * "original" properties, and an explicit description (if any) overrides the
 * delegate's.
 *
 * @throws IncompatibleUpdateException if the delegate rejects the update
 */
public DatasetSpecification reconfigure(String instanceName, DatasetProperties newProperties, DatasetSpecification currentSpec) throws IncompatibleUpdateException {
  DatasetSpecification reconfigured = AbstractDatasetDefinition
      .reconfigure(delegate, instanceName, newProperties, currentSpec)
      .setOriginalProperties(newProperties);
  // Only override the description when the caller explicitly provided one.
  String description = newProperties.getDescription();
  return description == null ? reconfigured : reconfigured.setDescription(description);
}
/**
 * Configures a new dataset instance via the wrapped definition, attaching the
 * caller-supplied properties as the spec's original properties and applying an
 * explicit description when present.
 */
public DatasetSpecification configure(String instanceName, DatasetProperties properties) {
  DatasetSpecification configured =
      delegate.configure(instanceName, properties).setOriginalProperties(properties);
  String description = properties.getDescription();
  // A null description keeps whatever the delegate produced.
  return description == null ? configured : configured.setDescription(description);
}
/**
 * Builds the dataset spec through the underlying definition, then stamps the
 * original properties on it. The description is overridden only when the
 * supplied properties carry one.
 */
public DatasetSpecification configure(String instanceName, DatasetProperties properties) {
  DatasetSpecification result = delegate.configure(instanceName, properties);
  result = result.setOriginalProperties(properties);
  if (properties.getDescription() == null) {
    return result;
  }
  return result.setDescription(properties.getDescription());
}
/**
 * Delegates reconfiguration to the shared {@code AbstractDatasetDefinition}
 * helper, records the new properties as the spec's originals, and applies the
 * caller's description if one was given.
 *
 * @throws IncompatibleUpdateException if the new properties are incompatible
 *     with the current spec
 */
public DatasetSpecification reconfigure(String instanceName, DatasetProperties newProperties, DatasetSpecification currentSpec) throws IncompatibleUpdateException {
  DatasetSpecification updated =
      AbstractDatasetDefinition.reconfigure(delegate, instanceName, newProperties, currentSpec);
  updated = updated.setOriginalProperties(newProperties);
  if (newProperties.getDescription() == null) {
    return updated;
  }
  return updated.setDescription(newProperties.getDescription());
}
/**
 * Creates a dataset instance through the remote dataset client and returns an
 * admin handle for it.
 *
 * <p>No owner principal is passed (the last constructor argument is {@code null}).
 *
 * @param datasetType name of the dataset type to instantiate
 * @param datasetId   id of the instance to create
 * @param props       creation properties; the description is forwarded as-is
 * @return a {@link RemoteDatasetAdmin} for the newly created instance
 * @throws Exception if the remote create call fails
 */
@Override
public <T extends DatasetAdmin> T addDatasetInstance(String datasetType, DatasetId datasetId, DatasetProperties props) throws Exception {
  DatasetInstanceConfiguration dsConf =
      new DatasetInstanceConfiguration(datasetType, props.getProperties(), props.getDescription(), null);
  datasetClient.create(datasetId, dsConf);
  // The returned admin is always a RemoteDatasetAdmin; the cast assumes callers
  // only request a compatible T — TODO confirm against call sites.
  @SuppressWarnings("unchecked")
  T admin = (T) new RemoteDatasetAdmin(datasetClient, datasetId, dsConf);
  return admin;
}
/**
 * Creates a dataset instance via the dataset service REST endpoint.
 *
 * @param datasetInstanceName name of the instance to create
 * @param datasetType         type of the instance
 * @param props               creation properties
 * @param owner               optional Kerberos owner; its principal string is
 *                            sent with the request
 * @throws InstanceConflictException  on HTTP 409 (instance already exists)
 * @throws DatasetManagementException on HTTP 403 (wrapped as unauthorized) or
 *                                    any other non-200 response
 */
public void addInstance(String datasetInstanceName, String datasetType, DatasetProperties props, @Nullable KerberosPrincipalId owner) throws DatasetManagementException {
  String ownerPrincipal = owner == null ? null : owner.getPrincipal();
  DatasetInstanceConfiguration creationProperties = new DatasetInstanceConfiguration(
      datasetType, props.getProperties(), props.getDescription(), ownerPrincipal);
  HttpResponse response = doPut("datasets/" + datasetInstanceName, GSON.toJson(creationProperties));
  int responseCode = response.getResponseCode();
  if (responseCode == HttpResponseStatus.CONFLICT.code()) {
    throw new InstanceConflictException(
        String.format("Failed to add instance %s due to conflict, details: %s", datasetInstanceName, response));
  }
  if (responseCode == HttpResponseStatus.FORBIDDEN.code()) {
    throw new DatasetManagementException(
        String.format("Failed to add instance %s, details: %s", datasetInstanceName, response),
        new UnauthorizedException(response.getResponseBodyAsString()));
  }
  if (responseCode != HttpResponseStatus.OK.code()) {
    throw new DatasetManagementException(
        String.format("Failed to add instance %s, details: %s", datasetInstanceName, response));
  }
}
/**
 * Issues a PUT to the dataset service to create a new instance, translating
 * HTTP status codes into the dataset-management exception hierarchy.
 *
 * @throws InstanceConflictException  when the service reports a conflict (409)
 * @throws DatasetManagementException when the request is forbidden (403) or
 *                                    fails for any other non-OK reason
 */
public void addInstance(String datasetInstanceName, String datasetType, DatasetProperties props, @Nullable KerberosPrincipalId owner) throws DatasetManagementException {
  String ownerPrincipal = owner == null ? null : owner.getPrincipal();
  DatasetInstanceConfiguration creationProperties =
      new DatasetInstanceConfiguration(datasetType, props.getProperties(), props.getDescription(), ownerPrincipal);
  HttpResponse response = doPut("datasets/" + datasetInstanceName, GSON.toJson(creationProperties));
  final int code = response.getResponseCode();
  if (HttpResponseStatus.OK.code() == code) {
    return;  // created successfully
  }
  if (HttpResponseStatus.CONFLICT.code() == code) {
    throw new InstanceConflictException(
        String.format("Failed to add instance %s due to conflict, details: %s", datasetInstanceName, response));
  }
  if (HttpResponseStatus.FORBIDDEN.code() == code) {
    throw new DatasetManagementException(
        String.format("Failed to add instance %s, details: %s", datasetInstanceName, response),
        new UnauthorizedException(response.getResponseBodyAsString()));
  }
  throw new DatasetManagementException(
      String.format("Failed to add instance %s, details: %s", datasetInstanceName, response));
}
/**
 * Creates a dataset instance under the write lock: verifies the instance does
 * not already exist, resolves the type definition, configures a spec (with
 * original properties and optional description), physically creates the
 * storage via the admin, then records the instance and publishes a CREATE
 * audit event.
 *
 * @throws UnsupportedOperationException if an owner principal is supplied
 * @throws InstanceConflictException     if the instance already exists
 * @throws DatasetManagementException    if the type is unknown in both the
 *                                       target and system namespaces
 */
@Override
public void addInstance(String datasetType, DatasetId datasetInstanceId, DatasetProperties props, @Nullable KerberosPrincipalId ownerPrincipal) throws DatasetManagementException, IOException {
  // Owners are not supported by this implementation.
  if (ownerPrincipal != null) {
    throw new UnsupportedOperationException("Creating dataset with owner is not supported");
  }
  writeLock.lock();
  try {
    if (instances.contains(datasetInstanceId.getParent(), datasetInstanceId)) {
      throw new InstanceConflictException(
          String.format("Dataset instance '%s' already exists.", datasetInstanceId));
    }
    DatasetDefinition typeDefinition = getDefinitionForType(datasetInstanceId.getParent(), datasetType);
    if (typeDefinition == null) {
      throw new DatasetManagementException(
          String.format("Dataset type '%s' is neither registered in the '%s' namespace nor in the system namespace",
                        datasetType, datasetInstanceId.getParent()));
    }
    DatasetSpecification spec =
        typeDefinition.configure(datasetInstanceId.getEntityName(), props).setOriginalProperties(props);
    if (props.getDescription() != null) {
      spec = spec.setDescription(props.getDescription());
    }
    // Create physical storage before recording the instance.
    typeDefinition.getAdmin(DatasetContext.from(datasetInstanceId.getNamespace()), spec, null).create();
    instances.put(datasetInstanceId.getParent(), datasetInstanceId, spec);
    publishAudit(datasetInstanceId, AuditType.CREATE);
    LOG.info("Created dataset {} of type {}", datasetInstanceId, datasetType);
  } finally {
    writeLock.unlock();
  }
}
/**
 * Adds a new dataset instance. Runs entirely inside the write lock so the
 * existence check, storage creation, and registration happen atomically with
 * respect to other writers.
 *
 * @throws UnsupportedOperationException if an owner principal is supplied
 * @throws InstanceConflictException     if an instance with this id exists
 * @throws DatasetManagementException    if the dataset type cannot be resolved
 * @throws IOException                   if physical creation fails
 */
@Override
public void addInstance(String datasetType, DatasetId datasetInstanceId, DatasetProperties props, @Nullable KerberosPrincipalId ownerPrincipal) throws DatasetManagementException, IOException {
  if (ownerPrincipal != null) {
    throw new UnsupportedOperationException("Creating dataset with owner is not supported");
  }
  writeLock.lock();
  try {
    if (instances.contains(datasetInstanceId.getParent(), datasetInstanceId)) {
      throw new InstanceConflictException(
          String.format("Dataset instance '%s' already exists.", datasetInstanceId));
    }
    // Resolve the type in the instance's namespace, falling back to system (per helper).
    DatasetDefinition definition = getDefinitionForType(datasetInstanceId.getParent(), datasetType);
    if (definition == null) {
      throw new DatasetManagementException(
          String.format("Dataset type '%s' is neither registered in the '%s' namespace nor in the system namespace",
                        datasetType, datasetInstanceId.getParent()));
    }
    DatasetSpecification specification = definition.configure(datasetInstanceId.getEntityName(), props);
    specification = specification.setOriginalProperties(props);
    if (props.getDescription() != null) {
      specification = specification.setDescription(props.getDescription());
    }
    definition.getAdmin(DatasetContext.from(datasetInstanceId.getNamespace()), specification, null).create();
    instances.put(datasetInstanceId.getParent(), datasetInstanceId, specification);
    publishAudit(datasetInstanceId, AuditType.CREATE);
    LOG.info("Created dataset {} of type {}", datasetInstanceId, datasetType);
  } finally {
    writeLock.unlock();
  }
}
// Fragment of a larger method (enclosing declaration not visible here): reconfigures the
// spec via the shared AbstractDatasetDefinition helper, records the caller's properties as
// originals, then applies an explicit description when one was provided.
AbstractDatasetDefinition.reconfigure(def, datasetInstanceId.getEntityName(), props, oldSpec) .setOriginalProperties(props); if (props.getDescription() != null) { spec = spec.setDescription(props.getDescription());
// Fragment of a larger method (enclosing declaration not visible here): delegates
// reconfiguration to AbstractDatasetDefinition, keeps the supplied properties on the
// resulting spec, and overrides the description only if the caller set one.
AbstractDatasetDefinition.reconfigure(def, datasetInstanceId.getEntityName(), props, oldSpec) .setOriginalProperties(props); if (props.getDescription() != null) { spec = spec.setDescription(props.getDescription());
/**
 * Returns a copy of the given properties tagged with workflow-run metadata so
 * the dataset is recognized as a workflow-local dataset, optionally marking it
 * to be kept after the run.
 */
private DatasetProperties addLocalDatasetProperty(DatasetProperties properties, boolean keepLocal) {
  DatasetProperties.Builder propsBuilder = DatasetProperties.builder();
  // Start from the caller's properties, then layer the workflow-run identity on top.
  propsBuilder.addAll(properties.getProperties());
  propsBuilder.add(Constants.AppFabric.WORKFLOW_LOCAL_DATASET_PROPERTY, "true");
  propsBuilder.add(Constants.AppFabric.WORKFLOW_NAMESPACE_NAME, workflowRunId.getNamespace());
  propsBuilder.add(Constants.AppFabric.WORKFLOW_APPLICATION_NAME, workflowRunId.getApplication());
  propsBuilder.add(Constants.AppFabric.WORKFLOW_APPLICATION_VERSION, workflowRunId.getVersion());
  propsBuilder.add(Constants.AppFabric.WORKFLOW_PROGRAM_NAME, workflowRunId.getProgram());
  propsBuilder.add(Constants.AppFabric.WORKFLOW_RUN_ID, workflowRunId.getRun());
  if (keepLocal) {
    propsBuilder.add(Constants.AppFabric.WORKFLOW_KEEP_LOCAL, "true");
  }
  // Preserve the original description verbatim.
  propsBuilder.setDescription(properties.getDescription());
  return propsBuilder.build();
}
/**
 * Builds new dataset properties from {@code properties} with the workflow-local
 * marker and the identifying pieces of the current workflow run attached.
 * When {@code keepLocal} is set, the keep-local flag is added as well.
 */
private DatasetProperties addLocalDatasetProperty(DatasetProperties properties, boolean keepLocal) {
  String originalDescription = properties.getDescription();
  DatasetProperties.Builder localProps = DatasetProperties.builder();
  localProps.addAll(properties.getProperties());
  // Tag with the run identity so the dataset can be traced back to this workflow run.
  localProps.add(Constants.AppFabric.WORKFLOW_LOCAL_DATASET_PROPERTY, "true");
  localProps.add(Constants.AppFabric.WORKFLOW_NAMESPACE_NAME, workflowRunId.getNamespace());
  localProps.add(Constants.AppFabric.WORKFLOW_APPLICATION_NAME, workflowRunId.getApplication());
  localProps.add(Constants.AppFabric.WORKFLOW_APPLICATION_VERSION, workflowRunId.getVersion());
  localProps.add(Constants.AppFabric.WORKFLOW_PROGRAM_NAME, workflowRunId.getProgram());
  localProps.add(Constants.AppFabric.WORKFLOW_RUN_ID, workflowRunId.getRun());
  if (keepLocal) {
    localProps.add(Constants.AppFabric.WORKFLOW_KEEP_LOCAL, "true");
  }
  localProps.setDescription(originalDescription);
  return localProps.build();
}