/**
 * Strips the "type" and "schema" entries before handing the properties to the
 * underlying table, since ObjectStore accepts schemas that plain tables do not.
 * Both keys are required to be present.
 */
private DatasetProperties checkAndRemoveSchema(DatasetProperties properties) {
  Map<String, String> original = properties.getProperties();
  Preconditions.checkArgument(original.containsKey("type"));
  Preconditions.checkArgument(original.containsKey("schema"));
  // Work on a mutable copy; the incoming map may be immutable.
  Map<String, String> stripped = Maps.newHashMap(original);
  stripped.remove("type");
  stripped.remove("schema");
  return DatasetProperties.of(stripped);
}
/**
 * Returns the name of the Explore table configured by the given dataset
 * properties, or {@code null} when none is configured.
 *
 * @param props the dataset properties to inspect
 */
@Nullable
public static String getExploreTableName(DatasetProperties props) {
  Map<String, String> raw = props.getProperties();
  return getExploreTableName(raw);
}
/**
 * Returns whether the given dataset properties enable read-less increments.
 * Defaults to {@code false} when the property is absent.
 *
 * @param props the dataset properties to inspect
 */
public static boolean getReadlessIncrementSupport(DatasetProperties props) {
  Map<String, String> raw = props.getProperties();
  return getReadlessIncrementSupport(raw);
}
/**
 * Builds the dataset specification, stamping the current file set version
 * into the stored properties.
 */
@Override
public DatasetSpecification configure(String instanceName, DatasetProperties properties) {
  // Reject invalid input before doing any other work.
  validateProperties(properties.getProperties());
  Map<String, String> augmented = new HashMap<>(properties.getProperties());
  augmented.put(FileSetDataset.FILESET_VERSION_PROPERTY, FileSetDataset.FILESET_VERSION);
  return DatasetSpecification.builder(instanceName, getName())
    .properties(augmented)
    .build();
}
/**
 * Returns a copy of the given properties with the index-columns setting
 * added as a comma-separated list.
 */
private DatasetProperties addIndexColumns(DatasetProperties properties, String... indexColumns) {
  String joined = Joiner.on(",").join(indexColumns);
  return DatasetProperties.builder()
    .addAll(properties.getProperties())
    .add(IndexedTable.INDEX_COLUMNS_CONF_KEY, joined)
    .build();
}
/**
 * Rejects any change to the non-reconfigurable property; otherwise delegates
 * to {@link #configure}.
 *
 * @throws IncompatibleUpdateException if the value of the non-reconfigurable
 *                                     property differs from the current spec
 */
@Override
public DatasetSpecification reconfigure(String instanceName, DatasetProperties properties,
                                        DatasetSpecification currentSpec)
  throws IncompatibleUpdateException {
  String oldValue = currentSpec.getProperties().get(NOT_RECONFIGURABLE);
  String newValue = properties.getProperties().get(NOT_RECONFIGURABLE);
  if (!Objects.equals(oldValue, newValue)) {
    throw new IncompatibleUpdateException(
      String.format("Can't change %s from %s to %s. ", NOT_RECONFIGURABLE, oldValue, newValue));
  }
  return configure(instanceName, properties);
}
/**
 * Creates properties for {@link TimeseriesTable} data set instance.
 *
 * @param timeIntervalToStorePerRow time interval to store per row. See {@link TimeseriesTable} for details.
 * @param props additional properties to merge in. NOTE(review): these are added after the
 *              time-interval setting, so a conflicting key in {@code props} may override it —
 *              confirm this ordering is intended.
 * @return {@link co.cask.cdap.api.dataset.DatasetProperties} for the data set
 */
public static DatasetProperties timeseriesTableProperties(int timeIntervalToStorePerRow,
                                                          DatasetProperties props) {
  return DatasetProperties.builder()
    .add(TimeseriesTable.ATTR_TIME_INTERVAL_TO_STORE_PER_ROW, timeIntervalToStorePerRow)
    .addAll(props.getProperties())
    .build();
}
}
/**
 * Creates a dataset instance of the given type via the remote dataset client
 * and returns an admin handle for it.
 *
 * @param datasetType name of the dataset type to instantiate
 * @param datasetId   id of the dataset instance to create
 * @param props       properties (and description) for the new instance
 * @param <T>         the admin type the caller expects; must be compatible
 *                    with {@link RemoteDatasetAdmin}
 * @return a {@link RemoteDatasetAdmin} for the newly created instance
 * @throws Exception if the remote create call fails
 */
@Override
@SuppressWarnings("unchecked") // the cast to T is this API's contract: callers must request a
                               // type compatible with RemoteDatasetAdmin
public <T extends DatasetAdmin> T addDatasetInstance(String datasetType, DatasetId datasetId,
                                                     DatasetProperties props) throws Exception {
  DatasetInstanceConfiguration dsConf = new DatasetInstanceConfiguration(
    datasetType, props.getProperties(), props.getDescription(), null);
  datasetClient.create(datasetId, dsConf);
  return (T) new RemoteDatasetAdmin(datasetClient, datasetId, dsConf);
}
/**
 * Builds a specification that passes the user-supplied properties through
 * unchanged.
 */
@Override
public DatasetSpecification configure(String instanceName, DatasetProperties properties) {
  String typeName = getName();
  return DatasetSpecification.builder(instanceName, typeName)
    .properties(properties.getProperties())
    .build();
}
/**
 * Rebuilds the specification from the new properties; any property change is
 * considered a compatible update, so the current spec is not consulted.
 */
@Override
public DatasetSpecification reconfigure(String name, DatasetProperties properties,
                                        DatasetSpecification currentSpec)
  throws IncompatibleUpdateException {
  String typeName = getName();
  return DatasetSpecification.builder(name, typeName)
    .properties(properties.getProperties())
    .build();
}
/**
 * Builds the specification, force-disabling transactions on this table
 * regardless of the supplied properties.
 */
@Override
public DatasetSpecification configure(String name, DatasetProperties properties) {
  String typeName = getName();
  return DatasetSpecification.builder(name, typeName)
    .properties(properties.getProperties())
    // Always overrides any caller-provided value for this key.
    .property(Constants.Dataset.TABLE_TX_DISABLED, "true")
    .build();
}
@Override public DatasetSpecification reconfigure(String instanceName, DatasetProperties properties, DatasetSpecification currentSpec) throws IncompatibleUpdateException { // add the partition key to the properties. properties = PartitionedFileSetProperties .builder() .setPartitioning(TimePartitionedFileSetDataset.PARTITIONING) .addAll(properties.getProperties()) .build(); return super.reconfigure(instanceName, properties, currentSpec); }
@Override public DatasetSpecification configure(String instanceName, DatasetProperties properties) { // add the partition key to the properties. properties = PartitionedFileSetProperties .builder() .setPartitioning(TimePartitionedFileSetDataset.PARTITIONING) .addAll(properties.getProperties()) .build(); return super.configure(instanceName, properties); }
/**
 * Derives the record schema string from the "recordClassName" property.
 *
 * @param properties dataset properties; expected to contain a "recordClassName"
 *                   entry naming a class visible to this classloader
 * @return the string form of the derived record schema
 * @throws RuntimeException wrapping any class-loading or schema-generation failure
 */
private String getSchemaString(DatasetProperties properties) {
  String className = properties.getProperties().get("recordClassName");
  try {
    Class<?> recordClass = Class.forName(className);
    Schema schema = ObjectMappedTableProperties.getObjectSchema(
      ObjectMappedTableProperties.builder().setType(recordClass).build().getProperties());
    // Re-wrap the fields under a top-level record named "record".
    schema = Schema.recordOf("record", schema.getFields());
    return schema.toString();
  } catch (ClassNotFoundException | UnsupportedTypeException | IOException e) {
    // Guava's Throwables.propagate is deprecated. All three caught types are checked
    // exceptions, so propagate() would have wrapped them in a RuntimeException anyway;
    // this explicit wrap preserves the original behavior.
    throw new RuntimeException(e);
  }
}
/**
 * Builds the specification with the backing table embedded under the fixed
 * name "objects".
 */
@Override
public DatasetSpecification configure(String instanceName, DatasetProperties properties) {
  DatasetSpecification tableSpec = tableDef.configure("objects", properties);
  return DatasetSpecification.builder(instanceName, getName())
    .properties(properties.getProperties())
    .datasets(tableSpec)
    .build();
}
/**
 * Creates the dataset specification; the user-supplied properties are stored
 * as-is, and the embedded table is configured under the name "objects".
 */
@Override
public DatasetSpecification configure(String instanceName, DatasetProperties properties) {
  String typeName = getName();
  DatasetSpecification embedded = tableDef.configure("objects", properties);
  return DatasetSpecification.builder(instanceName, typeName)
    .properties(properties.getProperties())
    .datasets(embedded)
    .build();
}
/**
 * Responds with the properties of the named dataset as JSON (HTTP 200),
 * or HTTP 404 when the dataset does not exist.
 */
@GET
@Path("props/{dataset}")
public void properties(HttpServiceRequest request, HttpServiceResponder responder,
                       @PathParam("dataset") String dataset) throws DatasetManagementException {
  Admin admin = getContext().getAdmin();
  if (admin.datasetExists(dataset)) {
    responder.sendJson(200, admin.getDatasetProperties(dataset).getProperties());
  } else {
    responder.sendStatus(404);
  }
}
/**
 * Issues a PUT request updating the properties of the given dataset instance.
 *
 * @throws IOException if the HTTP call fails
 */
private HttpResponse updateInstance(DatasetId instance, DatasetProperties props) throws IOException {
  String path = "/data/datasets/" + instance.getEntityName() + "/properties";
  String body = GSON.toJson(props.getProperties());
  HttpRequest request = HttpRequest.put(getUrl(instance.getNamespace(), path))
    .withBody(body)
    .build();
  return HttpRequests.execute(request);
}
/**
 * Builds the specification for a record dataset: the derived schema is stored
 * under the "schema" property and a key-value delegate is embedded as "kv".
 */
@Override
public DatasetSpecification configure(String instanceName, DatasetProperties properties) {
  // Derive the schema first so a failure surfaces before any delegate work.
  String schemaString = getSchemaString(properties);
  DatasetSpecification kvSpec = getDelegate("kv").configure("kv", DatasetProperties.EMPTY);
  return DatasetSpecification.builder(instanceName, RecordDataset.class.getName())
    .properties(properties.getProperties())
    .property("schema", schemaString)
    .datasets(kvSpec)
    .build();
}
/**
 * Builds the specification with a single embedded table named "kv" backing
 * this dataset.
 */
@Override
public DatasetSpecification configure(String instanceName, DatasetProperties properties) {
  DatasetSpecification kvSpec = tableDef.configure("kv", properties);
  return DatasetSpecification.builder(instanceName, getName())
    .properties(properties.getProperties())
    .datasets(kvSpec)
    .build();
}