/**
 * Renders this dataset's metadata as a flat field-name-to-value map.
 * <p>
 * Note the mixed value representations, preserved for compatibility with
 * existing consumers: {@code datasetId} and {@code pendingOp} are stringified,
 * {@code datasetType} uses the enum constant name, {@code datasetDetails} uses
 * its {@code toString()} form, and the remaining fields are stored as-is.
 *
 * @return a new mutable {@link HashMap} of this dataset's metadata fields
 */
public Map<String, Object> toMap() {
    Map<String, Object> fields = new HashMap<>();
    fields.put("datasetId", Integer.toString(datasetId));
    fields.put("dataverseName", dataverseName);
    fields.put("datasetName", datasetName);
    fields.put("recordTypeDataverseName", recordTypeDataverseName);
    fields.put("recordTypeName", recordTypeName);
    fields.put("nodeGroupName", nodeGroupName);
    fields.put("compactionPolicyFactory", compactionPolicyFactory);
    fields.put("hints", hints);
    fields.put("compactionPolicyProperties", compactionPolicyProperties);
    fields.put("datasetType", datasetType.name());
    fields.put("datasetDetails", datasetDetails.toString());
    fields.put("metaTypeDataverseName", metaTypeDataverseName);
    fields.put("metaTypeName", metaTypeName);
    fields.put("pendingOp", MetadataUtil.pendingOpToString(pendingOp));
    fields.put("rebalanceCount", rebalanceCount);
    fields.put("compressionScheme", compressionScheme);
    return fields;
}
// Pull the item type's dataverse name and the dataset type out of the serialized
// metadata record by fixed field position. NOTE(review): assumes the record layout
// matches the DATASET_ARECORD_* indexes in MetadataRecordTypes -- the casts to
// AString will throw ClassCastException if the record schema drifts; confirm the
// field indexes against the Dataset metadata record type definition.
String typeDataverseName = ((AString) datasetRecord .getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATATYPEDATAVERSENAME_FIELD_INDEX)).getStringValue(); DatasetType datasetType = DatasetType.valueOf( ((AString) datasetRecord.getValueByPos(MetadataRecordTypes.DATASET_ARECORD_DATASETTYPE_FIELD_INDEX)) .getStringValue());
// Write the dataset type into the metadata record: serialize the enum's string form
// and add it at the fixed DATASETTYPE field position. This is the write-side mirror
// of the valueOf(...) read path, so the stored value must be the enum constant name.
aString.setValue(dataset.getDatasetType().toString()); stringSerde.serialize(aString, fieldValue.getDataOutput()); recordBuilder.addField(MetadataRecordTypes.DATASET_ARECORD_DATASETTYPE_FIELD_INDEX, fieldValue);
// Defensive default arm: fail loudly if a new DatasetType enum constant is added
// without extending this switch, rather than silently falling through.
default: throw new CompilationException(ErrorCode.COMPILATION_UNKNOWN_DATASET_TYPE, dataset.getDatasetType().toString());
/**
 * Builds a dataset named {@code "ds_" + datasetPostfix} modeled on {@code source}
 * and adds it to the metadata inside a single metadata transaction, which is then
 * either committed or aborted depending on {@code abort}.
 *
 * NOTE(review): the third and fourth Dataset constructor arguments here are
 * source.getDataverseName() and source.getDatasetType().name() -- presumably the
 * item-type dataverse/name positions. Passing the dataset type's enum name as a
 * type name looks suspicious; confirm against the Dataset constructor signature.
 *
 * @param appCtx         CC application context used to construct the MetadataProvider
 * @param source         dataset whose properties seed the new dataset
 * @param datasetPostfix suffix for the generated dataset name; also passed as the
 *                       dataset id argument
 * @param abort          if true, the metadata transaction is aborted instead of committed
 * @throws Exception propagated from metadata transaction operations
 */
private void addDataset(ICcApplicationContext appCtx, Dataset source, int datasetPostfix, boolean abort)
        throws Exception {
    Dataset dataset = new Dataset(source.getDataverseName(), "ds_" + datasetPostfix, source.getDataverseName(),
            source.getDatasetType().name(), source.getNodeGroupName(), NoMergePolicyFactory.NAME, null,
            source.getDatasetDetails(), source.getHints(), DatasetConfig.DatasetType.INTERNAL, datasetPostfix, 0);
    // The MetadataProvider is created only to own/release the metadata locks below.
    MetadataProvider metadataProvider = new MetadataProvider(appCtx, null);
    final MetadataTransactionContext writeTxn = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(writeTxn);
    try {
        MetadataManager.INSTANCE.addDataset(writeTxn, dataset);
        if (abort) {
            // Exercise the abort path on request (e.g. to test txn rollback).
            MetadataManager.INSTANCE.abortTransaction(writeTxn);
        } else {
            MetadataManager.INSTANCE.commitTransaction(writeTxn);
        }
    } finally {
        // Always release metadata locks, whether the transaction committed or aborted.
        metadataProvider.getLocks().unlock();
    }
}
}
/**
 * Resolves a {@link DataSourceId} to a concrete {@code DatasetDataSource} by looking
 * up the dataset, its item/meta types, and its node-group domain in the metadata.
 *
 * @param clusterStateManager cluster state used to resolve the node domain
 * @param mdTxnCtx            active metadata transaction context for the lookups
 * @param aqlId               the datasource id (dataverse + datasource name)
 * @return the resolved datasource
 * @throws AlgebricksException if no dataset with the given id exists, or if any
 *                             downstream metadata lookup fails
 */
public static DataSource lookupSourceInMetadata(IClusterStateManager clusterStateManager,
        MetadataTransactionContext mdTxnCtx, DataSourceId aqlId) throws AlgebricksException {
    Dataset dataset = findDataset(mdTxnCtx, aqlId.getDataverseName(), aqlId.getDatasourceName());
    if (dataset == null) {
        throw new AlgebricksException("Datasource with id " + aqlId + " was not found.");
    }
    IAType itemType = findType(mdTxnCtx, dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
    // Meta type may legitimately be absent; findType is handed whatever the dataset records.
    IAType metaItemType = findType(mdTxnCtx, dataset.getMetaItemTypeDataverseName(), dataset.getMetaItemTypeName());
    INodeDomain domain = findNodeDomain(clusterStateManager, mdTxnCtx, dataset.getNodeGroupName());
    // Compare enum constants with == rather than equals(): type-safe and idiomatic.
    byte datasourceType = dataset.getDatasetType() == DatasetType.EXTERNAL ? DataSource.Type.EXTERNAL_DATASET
            : DataSource.Type.INTERNAL_DATASET;
    return new DatasetDataSource(aqlId, dataset, itemType, metaItemType, datasourceType,
            dataset.getDatasetDetails(), domain);
}
}
/**
 * Looks up a dataset and verifies it exists and is an internal dataset.
 *
 * @param metadataProvider provider used for the dataset lookup
 * @param dataverse        dataverse containing the dataset
 * @param datasetName      name of the dataset to validate
 * @return the resolved dataset, guaranteed non-null and of type INTERNAL
 * @throws AlgebricksException if the dataset does not exist or is not INTERNAL
 */
public static Dataset validateIfDatasetExists(MetadataProvider metadataProvider, String dataverse,
        String datasetName) throws AlgebricksException {
    Dataset dataset = metadataProvider.findDataset(dataverse, datasetName);
    if (dataset == null) {
        throw new CompilationException("Unknown target dataset :" + datasetName);
    }
    // Compare enum constants with != rather than !equals(): type-safe and idiomatic.
    if (dataset.getDatasetType() != DatasetType.INTERNAL) {
        throw new CompilationException(
                "Statement not applicable. Dataset " + datasetName + " is not of required type "
                        + DatasetType.INTERNAL);
    }
    return dataset;
}