/**
 * Enable or disable read-less increment support in a table's dataset properties.
 *
 * @param builder the properties builder to update
 * @param enabled {@code true} to turn read-less increments on, {@code false} to turn them off
 */
public static void setReadlessIncrementSupport(DatasetProperties.Builder builder, boolean enabled) {
  // Boolean.toString is what String.valueOf(boolean) delegates to; value is stored as "true"/"false".
  builder.add(Table.PROPERTY_READLESS_INCREMENT, Boolean.toString(enabled));
}
/**
 * Set the table permissions, given as a map from user name to a permission string.
 * The map is serialized to JSON before being stored in the properties.
 *
 * @param builder the properties builder to update
 * @param permissions mapping from user name to that user's permission string
 */
@Beta
public static void setTablePermissions(DatasetProperties.Builder builder, Map<String, String> permissions) {
  String serialized = GSON.toJson(permissions);
  builder.add(PROPERTY_TABLE_PERMISSIONS, serialized);
}
/**
 * Return a copy of the given dataset properties with the index-columns property added,
 * listing the given columns as a comma-separated string.
 *
 * @param properties the properties to copy
 * @param indexColumns the names of the columns to index
 * @return new properties containing all original entries plus the index-columns entry
 */
private DatasetProperties addIndexColumns(DatasetProperties properties, String... indexColumns) {
  return DatasetProperties
    .builder()
    .addAll(properties.getProperties())
    // String.join replaces Guava's Joiner.on(",").join(...) — same output for non-null
    // column names. (Joiner would NPE on a null element; String.join renders "null".)
    .add(IndexedTable.INDEX_COLUMNS_CONF_KEY, String.join(",", indexColumns))
    .build();
}
@Override
public void configure() {
  setName(NAME);
  addDatasetModule("other", RecordDatasetModule.class);
  // Dataset is parameterized with the record class it should store.
  DatasetProperties recordProps = DatasetProperties.builder()
      .add("recordClassName", getRecordClass().getName())
      .build();
  createDataset(DATASET_NAME, RecordDataset.class, recordProps);
  addService("NoOpService", new NoOpHandler());
}
}
@Override
public void configure() {
  setName(NAME);
  addDatasetModule("record", RecordDatasetModule.class);
  // Dataset is parameterized with the record class it should store; the dataset
  // type is referenced by name here rather than by class.
  DatasetProperties recordProps = DatasetProperties.builder()
      .add("recordClassName", getRecordClass().getName())
      .build();
  createDataset(DATASET_NAME, RecordDataset.class.getName(), recordProps);
  addService("NoOpService", new NoOpHandler());
}
/**
 * Set the schema in a table's dataset properties.
 *
 * @param builder the properties builder to update
 * @param schema the schema to record; stored via its string representation
 */
public static void setSchema(DatasetProperties.Builder builder, Schema schema) {
  String schemaString = schema.toString();
  builder.add(DatasetProperties.SCHEMA, schemaString);
}
/**
 * Set the table's time-to-live, in seconds, in its dataset properties.
 * A zero or negative value means the table has no TTL.
 *
 * @param builder the properties builder to update
 * @param ttl time-to-live in seconds; zero or negative disables TTL
 */
public static void setTTL(DatasetProperties.Builder builder, long ttl) {
  builder.add(Table.PROPERTY_TTL, ttl);
}
/**
 * Set the name of the database to be used for Explore.
 *
 * @param builder the properties builder to update
 * @param databaseName name of the Explore database
 */
public static void setExploreDatabaseName(DatasetProperties.Builder builder, String databaseName) {
  builder.add(PROPERTY_EXPLORE_DATABASE_NAME, databaseName);
}
// Fetch (or lazily create) the MetadataDataset instance for the given id and scope.
private static MetadataDataset getDataset(DatasetId instance, MetadataScope scope) throws Exception {
  // The scope is passed to the dataset definition through its properties.
  DatasetProperties scopeProps = DatasetProperties.builder()
      .add(MetadataDatasetDefinition.SCOPE_KEY, scope.name())
      .build();
  return DatasetsUtil.getOrCreateDataset(dsFrameworkUtil.getFramework(), instance,
                                         MetadataDataset.class.getName(), scopeProps, null);
}
}
// Fetch (or lazily create) the MetadataDataset for the given scope, wrapping
// checked failures as unchecked.
private static MetadataDataset getMetadataDataset(DatasetContext context, DatasetFramework dsFramework,
                                                  MetadataScope scope) {
  // Building the properties cannot throw the caught exception types, so it can
  // safely live outside the try block.
  DatasetProperties scopeProps = DatasetProperties.builder()
      .add(MetadataDatasetDefinition.SCOPE_KEY, scope.name())
      .build();
  try {
    return DatasetsUtil.getOrCreateDataset(context, dsFramework, getMetadataDatasetInstance(scope),
                                           MetadataDataset.class.getName(), scopeProps);
  } catch (DatasetManagementException | IOException e) {
    // NOTE(review): Throwables.propagate is deprecated in recent Guava — consider
    // `throw new RuntimeException(e)` if DatasetManagementException is checked; verify first.
    throw Throwables.propagate(e);
  }
}
@Override
public void configure() {
  setName(NAME);
  setDescription("SampleWorkflow description");
  // Workflow-local datasets: a key-value table and a file set, each with a marker property.
  DatasetProperties tableProps = DatasetProperties.builder().add("foo", "bar").build();
  createLocalDataset(TABLE_NAME, KeyValueTable.class, tableProps);
  DatasetProperties fileProps = DatasetProperties.builder().add("anotherFoo", "anotherBar").build();
  createLocalDataset(FILE_NAME, FileSet.class, fileProps);
  addAction(new DummyAction(FIRST_ACTION));
  addAction(new DummyAction(SECOND_ACTION));
  addMapReduce(WORD_COUNT_MR);
}
}
/**
 * Return a copy of the given dataset properties with the index-columns property added,
 * listing the given columns as a comma-separated string.
 *
 * @param properties the properties to copy
 * @param indexColumns the names of the columns to index
 * @return new properties containing all original entries plus the index-columns entry
 */
private DatasetProperties addIndexColumns(DatasetProperties properties, String... indexColumns) {
  return DatasetProperties
    .builder()
    .addAll(properties.getProperties())
    // String.join replaces Guava's Joiner.on(",").join(...) — same output for non-null
    // column names. (Joiner would NPE on a null element; String.join renders "null".)
    .add(IndexedTable.INDEX_COLUMNS_CONF_KEY, String.join(",", indexColumns))
    .build();
}
/**
 * Set the conflict detection level in a table's dataset properties.
 *
 * @param builder the properties builder to update
 * @param level the conflict detection level; stored by its enum name
 */
public static void setConflictDetection(DatasetProperties.Builder builder, ConflictDetection level) {
  String levelName = level.name();
  builder.add(Table.PROPERTY_CONFLICT_LEVEL, levelName);
}
@Override
public void configure() {
  // Cube with two time resolutions (1s and 60s) and one aggregation over user/action.
  DatasetProperties cubeProps = DatasetProperties.builder()
      .add("dataset.cube.resolutions", "1,60")
      .add("dataset.cube.aggregation.agg1.dimensions", "user,action")
      .add("dataset.cube.aggregation.agg1.requiredDimensions", "user,action")
      .build();
  createDataset(CUBE_NAME, Cube.class, cubeProps);
  addService(SERVICE_NAME, new CubeHandler());
}
private DatasetProperties computeFactTableProperties(DatasetProperties props) { // Configuring tables that hold data of specific resolution Map<String, Aggregation> aggregations = getAggregations(props.getProperties()); // Adding pre-splitting for fact tables byte[][] splits = FactTable.getSplits(aggregations.size()); // and combine them return DatasetProperties.builder() .addAll(props.getProperties()) .add(HBaseTableAdmin.PROPERTY_SPLITS, GSON.toJson(splits)) .build(); }
/**
 * Set the column family in a table's dataset properties.
 *
 * @param builder the properties builder to update
 * @param family name of the column family
 */
public static void setColumnFamily(DatasetProperties.Builder builder, String family) {
  builder.add(Table.PROPERTY_COLUMN_FAMILY, family);
}
private DatasetProperties computeFactTableProperties(DatasetProperties props) { // Configuring tables that hold data of specific resolution Map<String, Aggregation> aggregations = getAggregations(props.getProperties()); // Adding pre-splitting for fact tables byte[][] splits = FactTable.getSplits(aggregations.size()); // and combine them return DatasetProperties.builder() .addAll(props.getProperties()) .add(HBaseTableAdmin.PROPERTY_SPLITS, GSON.toJson(splits)) .build(); }
@Override
public void configure() {
  setName("FooApp");
  setDescription("Foo App");
  createDataset("dataset2", KeyValueTable.class);
  // dataset3 is an indexed table with a single indexed column, "foo".
  DatasetProperties indexProps = DatasetProperties.builder()
      .add(IndexedTable.INDEX_COLUMNS_CONF_KEY, "foo")
      .build();
  createDataset("dataset3", IndexedTable.class, indexProps);
  addMapReduce(new FooMapReduceJob("mrJob2"));
  addMapReduce(new FooMapReduceJob("mrJob3"));
}
}
/**
 * Set the name of the table to be used for Explore.
 *
 * @param builder the properties builder to update
 * @param tableName name of the Explore table
 */
public static void setExploreTableName(DatasetProperties.Builder builder, String tableName) {
  builder.add(PROPERTY_EXPLORE_TABLE_NAME, tableName);
}
// Fetch (or lazily create) the MetadataDataset for the given scope; checked
// failures are rethrown unchecked.
public static MetadataDataset getMetadataDataset(DatasetContext context, DatasetFramework dsFramework,
                                                 MetadataScope scope) {
  // The scope is handed to the dataset definition via a single property.
  DatasetProperties props =
      DatasetProperties.builder().add(MetadataDatasetDefinition.SCOPE_KEY, scope.name()).build();
  try {
    return DatasetsUtil.getOrCreateDataset(
      context, dsFramework, getMetadataDatasetInstance(scope), MetadataDataset.class.getName(), props);
  } catch (DatasetManagementException | IOException e) {
    throw Throwables.propagate(e);
  }
}