// Hash over the instance name, type name, and property map.
// NOTE(review): equals() is not visible in this chunk — presumably it compares
// the same three fields (instanceName, typeName, props.getProperties());
// verify they stay in sync if either method changes.
@Override public int hashCode() { return Objects.hash(instanceName, typeName, props.getProperties()); } }
/**
 * Returns a copy of the given properties with the index-columns key added.
 *
 * @param properties base dataset properties to copy
 * @param indexColumns column names to index; stored comma-separated under
 *                     {@link IndexedTable#INDEX_COLUMNS_CONF_KEY}
 * @return new properties containing all original entries plus the index-columns entry
 */
private DatasetProperties addIndexColumns(DatasetProperties properties, String... indexColumns) {
  return DatasetProperties
    .builder()
    .addAll(properties.getProperties())
    // JDK String.join replaces Guava's Joiner for a simple comma join
    .add(IndexedTable.INDEX_COLUMNS_CONF_KEY, String.join(",", indexColumns))
    .build();
}
/**
 * Strips the type and schema entries from the properties sent to the underlying table,
 * since ObjectStore allows a schema that plain tables do not.
 *
 * @param properties properties that must contain both a "type" and a "schema" entry
 * @return a copy of the properties without the "type" and "schema" entries
 * @throws IllegalArgumentException if either required key is missing
 */
private DatasetProperties checkAndRemoveSchema(DatasetProperties properties) {
  // Fail fast with descriptive messages instead of bare IllegalArgumentExceptions.
  Preconditions.checkArgument(properties.getProperties().containsKey("type"),
                              "Missing required dataset property 'type'");
  Preconditions.checkArgument(properties.getProperties().containsKey("schema"),
                              "Missing required dataset property 'schema'");
  Map<String, String> tableProperties = Maps.newHashMap(properties.getProperties());
  tableProperties.remove("type");
  tableProperties.remove("schema");
  return DatasetProperties.of(tableProperties);
}
/**
 * Creates a dataset instance through the remote dataset client and returns an admin for it.
 *
 * @param datasetType name of the dataset type to instantiate
 * @param datasetId id of the instance to create
 * @param props properties (and optional description) for the new instance
 * @return a {@link RemoteDatasetAdmin} for the created instance, cast to the caller's type
 * @throws Exception if the remote create call fails
 */
@Override
@SuppressWarnings("unchecked") // cast is the method's contract: callers must request a
                               // T compatible with RemoteDatasetAdmin
public <T extends DatasetAdmin> T addDatasetInstance(String datasetType, DatasetId datasetId,
                                                     DatasetProperties props) throws Exception {
  // No owner principal is supplied (null) for instances created through this path.
  DatasetInstanceConfiguration dsConf =
      new DatasetInstanceConfiguration(datasetType, props.getProperties(), props.getDescription(), null);
  datasetClient.create(datasetId, dsConf);
  return (T) new RemoteDatasetAdmin(datasetClient, datasetId, dsConf);
}
/**
 * Creates a {@link DatasetProperties} carrying the given key/value pairs.
 *
 * @param props properties to copy into the new instance
 * @return properties containing exactly the given entries
 */
public static DatasetProperties of(Map<String, String> props) {
  return DatasetProperties.builder()
    .addAll(props)
    .build();
}
/**
 * Decorates the given properties so the dataset is tagged as local to this workflow run,
 * recording the run's coordinates alongside the original properties.
 *
 * @param properties the dataset properties to decorate
 * @param keepLocal whether the local dataset should be kept after the workflow run
 * @return the decorated properties, preserving the original description
 */
private DatasetProperties addLocalDatasetProperty(DatasetProperties properties, boolean keepLocal) {
  DatasetProperties.Builder decorated = DatasetProperties.builder();
  decorated.addAll(properties.getProperties());
  // Mark the dataset as workflow-local and record the owning run's coordinates.
  decorated.add(Constants.AppFabric.WORKFLOW_LOCAL_DATASET_PROPERTY, "true");
  decorated.add(Constants.AppFabric.WORKFLOW_NAMESPACE_NAME, workflowRunId.getNamespace());
  decorated.add(Constants.AppFabric.WORKFLOW_APPLICATION_NAME, workflowRunId.getApplication());
  decorated.add(Constants.AppFabric.WORKFLOW_APPLICATION_VERSION, workflowRunId.getVersion());
  decorated.add(Constants.AppFabric.WORKFLOW_PROGRAM_NAME, workflowRunId.getProgram());
  decorated.add(Constants.AppFabric.WORKFLOW_RUN_ID, workflowRunId.getRun());
  if (keepLocal) {
    decorated.add(Constants.AppFabric.WORKFLOW_KEEP_LOCAL, "true");
  }
  // Carry over the original description, which the builder would otherwise drop.
  decorated.setDescription(properties.getDescription());
  return decorated.build();
}
/**
 * Reconfigures the delegate's spec with the new properties, recording the original
 * properties and (when present) the new description on the resulting spec.
 *
 * @throws IncompatibleUpdateException if the delegate rejects the update
 */
public DatasetSpecification reconfigure(String instanceName, DatasetProperties newProperties,
                                        DatasetSpecification currentSpec) throws IncompatibleUpdateException {
  DatasetSpecification reconfigured = AbstractDatasetDefinition
    .reconfigure(delegate, instanceName, newProperties, currentSpec)
    .setOriginalProperties(newProperties);
  String description = newProperties.getDescription();
  // Only stamp the description when one was supplied.
  return description == null ? reconfigured : reconfigured.setDescription(description);
}
/**
 * Gets (or lazily creates) the job queue dataset, configured with the number of
 * partitions taken from {@code cConf}.
 *
 * @param context dataset context to look up the dataset in
 * @param dsFramework framework used to create the dataset if absent
 * @param cConf configuration supplying the partition count
 * @return the job queue dataset
 */
public static JobQueueDataset getJobQueue(DatasetContext context, DatasetFramework dsFramework,
                                          CConfiguration cConf) {
  try {
    return DatasetsUtil.getOrCreateDataset(
      context, dsFramework, JOB_QUEUE_DATASET_ID, JobQueueDataset.class.getName(),
      () -> DatasetProperties.of(
        Collections.singletonMap(Constants.Scheduler.JOB_QUEUE_NUM_PARTITIONS,
                                 cConf.get(Constants.Scheduler.JOB_QUEUE_NUM_PARTITIONS))));
  } catch (DatasetManagementException | IOException e) {
    // Throwables.propagate is deprecated; this is its documented replacement:
    // rethrow unchecked exceptions as-is, wrap checked ones in RuntimeException.
    Throwables.throwIfUnchecked(e);
    throw new RuntimeException(e);
  }
}
/**
 * Creates a {@link DatasetProperties} from this builder, using the private
 * DatasetProperties constructor. The new instance wraps an unmodifiable view
 * of this builder's property map.
 */
public DatasetProperties build() {
  Map<String, String> frozen = Collections.unmodifiableMap(this.properties);
  return new DatasetProperties(frozen, description);
}
}
/**
 * Adds datasets and types to the given {@link DatasetFramework}. Used by the upgrade
 * tool to upgrade Datasets.
 *
 * @param datasetFramework framework to add types and datasets to
 */
public static void setupDatasets(DatasetFramework datasetFramework)
    throws IOException, DatasetManagementException {
  DatasetProperties jobQueueProperties = DatasetProperties.builder()
    .add(Constants.Scheduler.JOB_QUEUE_NUM_PARTITIONS, DEFAULT_NUM_PARTITIONS)
    .build();
  datasetFramework.addInstance(JobQueueDataset.class.getName(), Schedulers.JOB_QUEUE_DATASET_ID,
                               jobQueueProperties);
}
}
private DatasetProperties addLocalDatasetProperty(DatasetProperties properties, boolean keepLocal) { String dsDescription = properties.getDescription(); DatasetProperties.Builder builder = DatasetProperties.builder(); builder.addAll(properties.getProperties()); builder.add(Constants.AppFabric.WORKFLOW_LOCAL_DATASET_PROPERTY, "true"); builder.add(Constants.AppFabric.WORKFLOW_NAMESPACE_NAME, workflowRunId.getNamespace()); builder.add(Constants.AppFabric.WORKFLOW_APPLICATION_NAME, workflowRunId.getApplication()); builder.add(Constants.AppFabric.WORKFLOW_APPLICATION_VERSION, workflowRunId.getVersion()); builder.add(Constants.AppFabric.WORKFLOW_PROGRAM_NAME, workflowRunId.getProgram()); builder.add(Constants.AppFabric.WORKFLOW_RUN_ID, workflowRunId.getRun()); if (keepLocal) { builder.add(Constants.AppFabric.WORKFLOW_KEEP_LOCAL, "true"); } builder.setDescription(dsDescription); return builder.build(); }
/**
 * Creates a dataset instance via the dataset service's REST endpoint.
 *
 * @param datasetInstanceName name of the instance to create
 * @param datasetType dataset type name
 * @param props properties (and optional description) for the new instance
 * @param owner Kerberos principal owning the instance, or null for none
 * @throws InstanceConflictException if an instance with the same name already exists (409)
 * @throws DatasetManagementException if the request is forbidden (403) or otherwise fails
 */
public void addInstance(String datasetInstanceName, String datasetType, DatasetProperties props,
                        @Nullable KerberosPrincipalId owner) throws DatasetManagementException {
  String ownerPrincipal = owner == null ? null : owner.getPrincipal();
  DatasetInstanceConfiguration creationProperties = new DatasetInstanceConfiguration(
      datasetType, props.getProperties(), props.getDescription(), ownerPrincipal);
  HttpResponse response = doPut("datasets/" + datasetInstanceName, GSON.toJson(creationProperties));
  int responseCode = response.getResponseCode();
  if (responseCode == HttpResponseStatus.CONFLICT.code()) {
    throw new InstanceConflictException(String.format("Failed to add instance %s due to conflict, details: %s",
                                                      datasetInstanceName, response));
  }
  if (responseCode == HttpResponseStatus.FORBIDDEN.code()) {
    // Attach the server's response body as the cause so the denial reason is not lost.
    throw new DatasetManagementException(String.format("Failed to add instance %s, details: %s",
                                                       datasetInstanceName, response),
                                         new UnauthorizedException(response.getResponseBodyAsString()));
  }
  if (responseCode != HttpResponseStatus.OK.code()) {
    throw new DatasetManagementException(String.format("Failed to add instance %s, details: %s",
                                                       datasetInstanceName, response));
  }
}
/**
 * Configures the delegate and stamps the resulting spec with the original properties
 * and, when one was supplied, the description.
 */
public DatasetSpecification configure(String instanceName, DatasetProperties properties) {
  DatasetSpecification configured = delegate.configure(instanceName, properties)
    .setOriginalProperties(properties);
  String description = properties.getDescription();
  return description == null ? configured : configured.setDescription(description);
}
/**
 * Gets (or lazily creates) the job queue dataset, configured with the number of
 * partitions taken from {@code cConf}.
 *
 * @param context dataset context to look up the dataset in
 * @param dsFramework framework used to create the dataset if absent
 * @param cConf configuration supplying the partition count
 * @return the job queue dataset
 */
public static JobQueueDataset getJobQueue(DatasetContext context, DatasetFramework dsFramework,
                                          CConfiguration cConf) {
  try {
    return DatasetsUtil.getOrCreateDataset(
      context, dsFramework, JOB_QUEUE_DATASET_ID, JobQueueDataset.class.getName(),
      () -> DatasetProperties.of(
        Collections.singletonMap(Constants.Scheduler.JOB_QUEUE_NUM_PARTITIONS,
                                 cConf.get(Constants.Scheduler.JOB_QUEUE_NUM_PARTITIONS))));
  } catch (DatasetManagementException | IOException e) {
    // Throwables.propagate is deprecated; this is its documented replacement:
    // rethrow unchecked exceptions as-is, wrap checked ones in RuntimeException.
    Throwables.throwIfUnchecked(e);
    throw new RuntimeException(e);
  }
}
/**
 * Returns the name of the Explore database configured by the given properties.
 *
 * @return the configured Explore database name, or null if not configured
 */
@Nullable
public static String getExploreDatabaseName(DatasetProperties props) {
  // Delegate to the map-based overload.
  Map<String, String> rawProperties = props.getProperties();
  return getExploreDatabaseName(rawProperties);
}
/**
 * Returns a copy of the given properties with the index-columns key added.
 *
 * @param properties base dataset properties to copy
 * @param indexColumns column names to index; stored comma-separated under
 *                     {@link IndexedTable#INDEX_COLUMNS_CONF_KEY}
 * @return new properties containing all original entries plus the index-columns entry
 */
private DatasetProperties addIndexColumns(DatasetProperties properties, String... indexColumns) {
  return DatasetProperties
    .builder()
    .addAll(properties.getProperties())
    // JDK String.join replaces Guava's Joiner for a simple comma join
    .add(IndexedTable.INDEX_COLUMNS_CONF_KEY, String.join(",", indexColumns))
    .build();
}
/**
 * Adds datasets and types to the given {@link DatasetFramework}. Used by the upgrade
 * tool to upgrade Datasets.
 *
 * @param datasetFramework framework to add types and datasets to
 */
public static void setupDatasets(DatasetFramework datasetFramework)
    throws IOException, DatasetManagementException {
  DatasetProperties jobQueueProperties = DatasetProperties.builder()
    .add(Constants.Scheduler.JOB_QUEUE_NUM_PARTITIONS, DEFAULT_NUM_PARTITIONS)
    .build();
  datasetFramework.addInstance(JobQueueDataset.class.getName(), Schedulers.JOB_QUEUE_DATASET_ID,
                               jobQueueProperties);
}
}
/**
 * Strips the type and schema entries from the properties sent to the underlying table,
 * since ObjectStore allows a schema that plain tables do not.
 *
 * @param properties properties that must contain both a "type" and a "schema" entry
 * @return a copy of the properties without the "type" and "schema" entries
 * @throws IllegalArgumentException if either required key is missing
 */
private DatasetProperties checkAndRemoveSchema(DatasetProperties properties) {
  // Fail fast with descriptive messages instead of bare IllegalArgumentExceptions.
  Preconditions.checkArgument(properties.getProperties().containsKey("type"),
                              "Missing required dataset property 'type'");
  Preconditions.checkArgument(properties.getProperties().containsKey("schema"),
                              "Missing required dataset property 'schema'");
  Map<String, String> tableProperties = Maps.newHashMap(properties.getProperties());
  tableProperties.remove("type");
  tableProperties.remove("schema");
  return DatasetProperties.of(tableProperties);
}
/**
 * Creates a dataset instance via the dataset service's REST endpoint.
 *
 * @param datasetInstanceName name of the instance to create
 * @param datasetType dataset type name
 * @param props properties (and optional description) for the new instance
 * @param owner Kerberos principal owning the instance, or null for none
 * @throws InstanceConflictException if an instance with the same name already exists (409)
 * @throws DatasetManagementException if the request is forbidden (403) or otherwise fails
 */
public void addInstance(String datasetInstanceName, String datasetType, DatasetProperties props,
                        @Nullable KerberosPrincipalId owner) throws DatasetManagementException {
  String ownerPrincipal = owner == null ? null : owner.getPrincipal();
  DatasetInstanceConfiguration creationProperties = new DatasetInstanceConfiguration(
      datasetType, props.getProperties(), props.getDescription(), ownerPrincipal);
  HttpResponse response = doPut("datasets/" + datasetInstanceName, GSON.toJson(creationProperties));
  int responseCode = response.getResponseCode();
  if (responseCode == HttpResponseStatus.CONFLICT.code()) {
    throw new InstanceConflictException(String.format("Failed to add instance %s due to conflict, details: %s",
                                                      datasetInstanceName, response));
  }
  if (responseCode == HttpResponseStatus.FORBIDDEN.code()) {
    // Attach the server's response body as the cause so the denial reason is not lost.
    throw new DatasetManagementException(String.format("Failed to add instance %s, details: %s",
                                                       datasetInstanceName, response),
                                         new UnauthorizedException(response.getResponseBodyAsString()));
  }
  if (responseCode != HttpResponseStatus.OK.code()) {
    throw new DatasetManagementException(String.format("Failed to add instance %s, details: %s",
                                                       datasetInstanceName, response));
  }
}
/**
 * Reconfigures the delegate's spec with the new properties, recording the original
 * properties and (when present) the new description on the resulting spec.
 *
 * @throws IncompatibleUpdateException if the delegate rejects the update
 */
public DatasetSpecification reconfigure(String instanceName, DatasetProperties newProperties,
                                        DatasetSpecification currentSpec) throws IncompatibleUpdateException {
  DatasetSpecification reconfigured = AbstractDatasetDefinition
    .reconfigure(delegate, instanceName, newProperties, currentSpec)
    .setOriginalProperties(newProperties);
  String description = newProperties.getDescription();
  // Only stamp the description when one was supplied.
  return description == null ? reconfigured : reconfigured.setDescription(description);
}