/**
 * Renders this scoped name as {@code SCOPE:name}, e.g. {@code USER:myTag}.
 */
@Override
public String toString() {
  StringBuilder sb = new StringBuilder();
  sb.append(scope.name()).append(':').append(name);
  return sb.toString();
}
}
/**
 * Returns the names of all tags that belong to the given metadata scope.
 *
 * @param scope the scope to keep; tags in the other scope are dropped
 * @return the set of tag names in that scope (empty if none match)
 */
public Set<String> getTags(MetadataScope scope) {
  Set<String> inScope = tags.stream()
      .filter(t -> scope.equals(t.getScope()))
      .map(ScopedName::getName)
      .collect(Collectors.toSet());
  return inScope;
}
/**
 * Parses the given string into a {@link MetadataScope}, case-insensitively.
 *
 * @param scope the scope string supplied by the caller
 * @return the corresponding {@link MetadataScope}
 * @throws BadRequestException if the string names neither USER nor SYSTEM
 */
private MetadataScope validateScope(String scope) throws BadRequestException {
  try {
    return MetadataScope.valueOf(scope.toUpperCase());
  } catch (IllegalArgumentException e) {
    // NOTE(review): the cause is intentionally not chained; the message carries all context.
    String message = String.format("Invalid metadata scope '%s'. Expected '%s' or '%s'",
                                   scope, MetadataScope.USER, MetadataScope.SYSTEM);
    throw new BadRequestException(message);
  }
}
/**
 * CLI command: fetches the metadata records for an entity and renders them as a table
 * (one row per record: entity, tags, properties, scope).
 *
 * @param arguments CLI arguments; ENTITY is required, METADATA_SCOPE is optional
 * @param output stream the rendered table is written to
 */
@Override
public void perform(Arguments arguments, PrintStream output) throws Exception {
  MetadataEntity metadataEntity =
      MetadataCommandHelper.toMetadataEntity(arguments.get(ArgumentName.ENTITY.toString()));
  String requestedScope = arguments.getOptional(ArgumentName.METADATA_SCOPE.toString());
  // No scope argument means "all scopes".
  Set<MetadataRecord> metadata;
  if (requestedScope == null) {
    metadata = client.getMetadata(metadataEntity);
  } else {
    metadata = client.getMetadata(metadataEntity, MetadataScope.valueOf(requestedScope.toUpperCase()));
  }
  Table table = getTableBuilder()
      .setRows(metadata.stream()
                 .map(r -> Lists.newArrayList(
                     r.toString(),
                     Joiner.on("\n").join(r.getTags()),
                     Joiner.on("\n").withKeyValueSeparator(":").join(r.getProperties()),
                     r.getScope().name()))
                 .collect(Collectors.toList()))
      .build();
  cliConfig.getTableRenderer().render(cliConfig, output, table);
}
// NOTE(review): this fragment is not syntactically valid Java — it looks like a garbled
// merge/paste of pieces of the surrounding perform() implementations (dangling argument
// lists, an orphaned catch, two half statements). Left byte-identical; reconstruct the
// intended method from version control rather than guessing here.
client.getMetadata(entityId, MetadataScope.valueOf(scope.toUpperCase())); Joiner.on("\n").join(record.getTags()), Joiner.on("\n").withKeyValueSeparator(":").join(record.getProperties()), record.getScope().name())).collect(Collectors.toList()) ).build(); } catch (IllegalArgumentException e) { client.getMetadata(metadataEntity, MetadataScope.valueOf(scope.toUpperCase())); Joiner.on("\n").join(record.getTags()), Joiner.on("\n").withKeyValueSeparator(":").join(record.getProperties()), record.getScope().name())).collect(Collectors.toList()) ).build();
/**
 * Returns the {@link MetadataDataset} for the given scope, creating its dataset instance
 * if it does not exist yet.
 *
 * @param context dataset context used to instantiate the dataset
 * @param dsFramework framework used to create the instance if missing
 * @param scope metadata scope whose backing dataset is requested
 * @throws RuntimeException wrapping any dataset-management or I/O failure
 */
public static MetadataDataset getMetadataDataset(DatasetContext context, DatasetFramework dsFramework,
                                                 MetadataScope scope) {
  // The scope is stored as a dataset property so the definition can restore it later.
  DatasetProperties scopedProperties =
      DatasetProperties.builder().add(MetadataDatasetDefinition.SCOPE_KEY, scope.name()).build();
  try {
    return DatasetsUtil.getOrCreateDataset(context, dsFramework, getMetadataDatasetInstance(scope),
                                           MetadataDataset.class.getName(), scopedProperties);
  } catch (DatasetManagementException | IOException e) {
    throw Throwables.propagate(e);
  }
}
/**
 * Validates and converts a caller-supplied scope string into a {@link MetadataScope}.
 * Matching is case-insensitive.
 *
 * @param scope the raw scope string
 * @return the parsed scope
 * @throws BadRequestException if the value is not a recognized scope
 */
private MetadataScope validateScope(String scope) throws BadRequestException {
  try {
    return MetadataScope.valueOf(scope.toUpperCase());
  } catch (IllegalArgumentException e) {
    throw new BadRequestException(
        String.format("Invalid metadata scope '%s'. Expected '%s' or '%s'",
                      scope, MetadataScope.USER, MetadataScope.SYSTEM));
  }
}
/**
 * Returns all properties that belong to the given scope, keyed by property name.
 *
 * @param scope the scope to keep; entries in the other scope are filtered out
 * @return a map of property name to value for that scope (empty if none match)
 */
public Map<String, String> getProperties(MetadataScope scope) {
  return properties.entrySet().stream()
      .filter(e -> scope.equals(e.getKey().getScope()))
      .collect(Collectors.toMap(e -> e.getKey().getName(), Map.Entry::getValue));
}
/**
 * Fetches (or creates) the metadata dataset backing the given scope.
 *
 * @param context dataset context used to instantiate the dataset
 * @param dsFramework framework used for instance creation
 * @param scope metadata scope whose dataset is wanted
 * @throws RuntimeException wrapping any dataset-management or I/O failure
 */
private static MetadataDataset getMetadataDataset(DatasetContext context, DatasetFramework dsFramework,
                                                  MetadataScope scope) {
  DatasetId datasetId = getMetadataDatasetInstance(scope);
  try {
    return DatasetsUtil.getOrCreateDataset(
        context, dsFramework, datasetId, MetadataDataset.class.getName(),
        // Persist the scope on the instance so it can be recovered from the spec.
        DatasetProperties.builder().add(MetadataDatasetDefinition.SCOPE_KEY, scope.name()).build());
  } catch (DatasetManagementException | IOException e) {
    throw Throwables.propagate(e);
  }
}
/**
 * Reconstructs a {@link MetadataDataset} from its stored specification.
 * The scope is read from the {@code SCOPE_KEY} property; specs written before that
 * property existed have no value and default to {@link MetadataScope#USER}.
 */
@Override
public MetadataDataset getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                                  Map<String, String> arguments, ClassLoader classLoader) throws IOException {
  String storedScope = spec.getProperty(SCOPE_KEY);
  MetadataScope scope = storedScope == null ? MetadataScope.USER : MetadataScope.valueOf(storedScope);
  return new MetadataDataset(
      indexedTableDef.getDataset(datasetContext, spec.getSpecification(METADATA_INDEX_TABLE_NAME),
                                 arguments, classLoader),
      scope);
}
}
private void checkWriteAllowed(MetadataScope scope, MetadataEntity metadataEntity) { if (isMigrationCompleted()) { return; } // we do not support writes to V1 tables during migration. So throw an error if we are trying to update // an existing entity if ((scope.equals(MetadataScope.SYSTEM) ? hasV1SystemDs : hasV1BusinessDs) && hasEntityInV1(scope, metadataEntity)) { throw new ServiceUnavailableException("metadata-service", "Metadata migration is in progress. Please retry the same operation " + "once metadata is migrated."); } }
/**
 * Fetches (or creates) the MetadataDataset for the given dataset id.
 * The scope is inferred from the dataset name: names containing "business" map to
 * {@link MetadataScope#USER}, everything else to {@link MetadataScope#SYSTEM}.
 *
 * @param context dataset context used to instantiate the dataset
 * @param datasetId id of the metadata dataset instance
 */
private MetadataDataset getMetadataDataset(DatasetContext context, DatasetId datasetId)
  throws IOException, DatasetManagementException {
  MetadataScope scope = datasetId.getDataset().contains("business")
    ? MetadataScope.USER
    : MetadataScope.SYSTEM;
  DatasetProperties scopedProperties =
    DatasetProperties.builder().add(MetadataDatasetDefinition.SCOPE_KEY, scope.name()).build();
  return DatasetsUtil.getOrCreateDataset(context, dsFramework, datasetId,
                                         MetadataDataset.class.getName(), scopedProperties);
}
}
/**
 * Instantiates a {@link MetadataDataset} over its underlying indexed table.
 * Older specifications carry no {@code SCOPE_KEY} property, in which case the scope
 * defaults to {@link MetadataScope#USER}.
 */
@Override
public MetadataDataset getDataset(DatasetContext datasetContext, DatasetSpecification spec,
                                  Map<String, String> arguments, ClassLoader classLoader) throws IOException {
  String scopeProperty = spec.getProperty(SCOPE_KEY);
  return new MetadataDataset(
      indexedTableDef.getDataset(datasetContext, spec.getSpecification(METADATA_INDEX_TABLE_NAME),
                                 arguments, classLoader),
      scopeProperty == null ? MetadataScope.USER : MetadataScope.valueOf(scopeProperty));
}
}
private Metadata filterBy(Metadata metadata, MetadataScope scope, MetadataKind kind) { //noinspection ConstantConditions return new Metadata( kind == PROPERTY ? Collections.emptySet() : Sets.filter(metadata.getTags(), tag -> scope == null || scope.equals(tag.getScope())), kind == MetadataKind.TAG ? Collections.EMPTY_MAP : Maps.filterKeys(metadata.getProperties(), key -> scope == null || scope.equals(key.getScope()))); }
/**
 * Returns the metadata dataset for the given scope and table version.
 * V1 tables are only looked up (never created); V2 tables are created on demand with the
 * scope recorded as a dataset property.
 *
 * @param context dataset context used to instantiate the dataset
 * @param dsFramework framework used to create the V2 instance if missing
 * @param scope metadata scope whose dataset is wanted
 * @param ds which table generation (V1 or V2) to return
 * @throws RuntimeException wrapping any dataset-management or I/O failure
 */
private static MetadataDataset getMetadataDataset(DatasetContext context, DatasetFramework dsFramework,
                                                  MetadataScope scope, DATASET ds) {
  try {
    if (ds.equals(DATASET.V1)) {
      DatasetId v1Instance = getMetadataDatasetInstance(scope);
      return context.getDataset(v1Instance.getNamespace(), v1Instance.getDataset());
    }
    DatasetProperties scopedProperties =
        DatasetProperties.builder().add(MetadataDatasetDefinition.SCOPE_KEY, scope.name()).build();
    return DatasetsUtil.getOrCreateDataset(context, dsFramework, getV2MetadataDatasetInstance(scope),
                                           MetadataDataset.class.getName(), scopedProperties);
  } catch (DatasetManagementException | IOException e) {
    throw Throwables.propagate(e);
  }
}
/**
 * CLI command: fetches an entity's metadata properties and renders them as a
 * key/value table.
 *
 * @param arguments CLI arguments; ENTITY is required, METADATA_SCOPE is optional
 *                  (absent means both scopes)
 * @param output stream the rendered table is written to
 */
@Override
public void perform(Arguments arguments, PrintStream output) throws Exception {
  MetadataEntity metadataEntity =
      MetadataCommandHelper.toMetadataEntity(arguments.get(ArgumentName.ENTITY.toString()));
  String scope = arguments.getOptional(ArgumentName.METADATA_SCOPE.toString());
  Map<String, String> properties = scope == null
      ? client.getProperties(metadataEntity)
      : client.getProperties(metadataEntity, MetadataScope.valueOf(scope.toUpperCase()));
  // Lambda replaces the verbose anonymous Function; the old version also marked the
  // entry @Nullable yet dereferenced it unconditionally, which was misleading.
  Table table = Table.builder()
      .setHeader("key", "value")
      .setRows(Iterables.transform(properties.entrySet(),
                                   entry -> Lists.newArrayList(entry.getKey(), entry.getValue())))
      .build();
  cliConfig.getTableRenderer().render(cliConfig, output, table);
}
// NOTE(review): truncated fragment — it opens a try and an if that are closed outside the
// visible span, so it cannot be safely rewritten in isolation. Appears to validate the
// properties/tags of a USER-scope metadata 'put' before applying it; confirm against the
// full method. Left byte-identical.
try { boolean hasProperties, hasTags; if (MetadataScope.USER.equals(put.getScope())) { hasProperties = validateProperties(put.getEntity(), put.getProperties()); hasTags = validateTags(put.getEntity(), put.getTags());
/**
 * Creates the metadata dataset for the given scope, or upgrades it in place when it
 * already exists and an upgrade is required.
 *
 * @param scope the metadata scope whose dataset is created or upgraded
 * @throws DatasetManagementException if instance creation/update fails
 * @throws IOException on I/O failure while creating or updating the instance
 */
@Override
public void createOrUpgrade(MetadataScope scope) throws DatasetManagementException, IOException {
  DatasetId datasetId = getMetadataDatasetInstance(scope);
  DatasetProperties scopedProperties =
      DatasetProperties.builder().add(MetadataDatasetDefinition.SCOPE_KEY, scope.name()).build();
  if (!dsFramework.hasInstance(datasetId)) {
    // Fresh install: create the dataset and record that no upgrade is pending.
    DatasetsUtil.createIfNotExists(dsFramework, datasetId, MetadataDataset.class.getName(),
                                   scopedProperties);
    markUpgradeComplete(scope);
    return;
  }
  if (isUpgradeRequired(scope)) {
    dsFramework.updateInstance(datasetId, scopedProperties);
    removeNullOrEmptyTags(datasetId, scope);
    // NOTE(review): the upgrade path does not call markUpgradeComplete here (only the
    // create path does) — presumably a later step marks it; confirm before relying on it.
  }
}
/**
 * CLI command: fetches an entity's metadata properties (optionally restricted to one
 * scope) and renders them as a two-column key/value table.
 *
 * @param arguments CLI arguments; ENTITY is required, METADATA_SCOPE is optional
 * @param output stream the rendered table is written to
 */
@Override
public void perform(Arguments arguments, PrintStream output) throws Exception {
  MetadataEntity metadataEntity =
      MetadataCommandHelper.toMetadataEntity(arguments.get(ArgumentName.ENTITY.toString()));
  String scope = arguments.getOptional(ArgumentName.METADATA_SCOPE.toString());
  Map<String, String> properties = scope == null
      ? client.getProperties(metadataEntity)
      : client.getProperties(metadataEntity, MetadataScope.valueOf(scope.toUpperCase()));
  // Lambda instead of the anonymous Function class; the old code annotated the entry
  // @Nullable but dereferenced it unconditionally, so the annotation was misleading.
  Table table = Table.builder()
      .setHeader("key", "value")
      .setRows(Iterables.transform(properties.entrySet(),
                                   entry -> Lists.newArrayList(entry.getKey(), entry.getValue())))
      .build();
  cliConfig.getTableRenderer().render(cliConfig, output, table);
}
/**
 * Test helper: fetches the MetadataDataset for the given instance, creating it with the
 * given scope recorded as a dataset property if it does not exist yet.
 *
 * @param instance id of the metadata dataset instance
 * @param scope scope stored on the instance at creation time
 */
private static MetadataDataset getDataset(DatasetId instance, MetadataScope scope) throws Exception {
  DatasetProperties scopedProperties = DatasetProperties.builder()
      .add(MetadataDatasetDefinition.SCOPE_KEY, scope.name())
      .build();
  return DatasetsUtil.getOrCreateDataset(dsFrameworkUtil.getFramework(), instance,
                                         MetadataDataset.class.getName(), scopedProperties, null);
}
}