/**
 * Checks whether {@code name} is a fully-qualified "dataverse.dataset" name that
 * resolves to an existing dataset.
 * <p>
 * Note: {@code StringUtils.split(name, '.')} omits empty tokens, so the
 * two-part check below is what enforces the expected "a.b" shape.
 *
 * @param name a dataset name, possibly qualified with a dataverse prefix
 * @return {@code true} iff the name contains a '.', splits into exactly two
 *         non-empty parts, and the referenced dataset exists
 * @throws AlgebricksException if the metadata lookup fails
 */
private boolean fullyQualifiedDatasetNameExists(String name) throws AlgebricksException {
    if (name.indexOf('.') < 0) {
        // no separator at all -> cannot be a qualified name
        return false;
    }
    String[] parts = StringUtils.split(name, '.');
    if (parts.length != 2) {
        return false;
    }
    return metadataProvider.findDataset(parts[0], parts[1]) != null;
}
/**
 * Resolves the dataset named {@code dataverseName}.{@code datasetName} and flushes it
 * by delegating to {@code flushDataset(hcc, metadataProvider, dataset)}.
 *
 * @param hcc              Hyracks client connection used to run the flush job
 * @param metadataProvider provider used to look up the dataset in the metadata
 * @param dataverseName    dataverse containing the dataset
 * @param datasetName      name of the dataset to flush
 * @throws Exception if the lookup or the flush fails, or if the dataset does not exist
 */
public static void flushDataset(IHyracksClientConnection hcc, MetadataProvider metadataProvider,
        String dataverseName, String datasetName) throws Exception {
    Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
    if (dataset == null) {
        // Fail fast with a clear message instead of letting the null propagate
        // into the Dataset-based overload and surface as an NPE far from the cause.
        throw new IllegalStateException(
                "Could not find dataset " + datasetName + " in dataverse " + dataverseName);
    }
    flushDataset(hcc, metadataProvider, dataset);
}
/**
 * Determines whether a dataset with the given name exists: first as
 * {@code datasetName} inside {@code dataverseName}, then — if that lookup
 * misses — as a fully-qualified "dataverse.dataset" name.
 *
 * @param dataverseName dataverse to search first
 * @param datasetName   plain or fully-qualified dataset name
 * @param sourceLoc     source location reported on failure
 * @return {@code true} iff either lookup finds the dataset
 * @throws CompilationException wrapping any metadata-layer failure
 */
private boolean datasetExists(String dataverseName, String datasetName, SourceLocation sourceLoc)
        throws CompilationException {
    try {
        if (metadataProvider.findDataset(dataverseName, datasetName) != null) {
            return true;
        }
        // fall back to interpreting the name itself as "dataverse.dataset"
        return fullyQualifiedDatasetNameExists(datasetName);
    } catch (AlgebricksException e) {
        throw new CompilationException(ErrorCode.COMPILATION_ERROR, e, sourceLoc, e.getMessage());
    }
}
/**
 * Validates that the named dataset exists and is an {@code INTERNAL} dataset.
 *
 * @param metadataProvider provider used for the metadata lookup
 * @param dataverse        dataverse containing the dataset
 * @param datasetName      name of the dataset to validate
 * @return the resolved dataset (never {@code null})
 * @throws AlgebricksException ({@code CompilationException}) if the dataset is
 *         unknown or is not of type {@code INTERNAL}
 */
public static Dataset validateIfDatasetExists(MetadataProvider metadataProvider, String dataverse,
        String datasetName) throws AlgebricksException {
    Dataset dataset = metadataProvider.findDataset(dataverse, datasetName);
    if (dataset == null) {
        throw new CompilationException("Unknown target dataset :" + datasetName);
    }
    // Enum constants are singletons: compare with identity rather than equals(),
    // which also avoids an NPE should getDatasetType() ever return null.
    if (dataset.getDatasetType() != DatasetType.INTERNAL) {
        throw new CompilationException("Statement not applicable. Dataset " + datasetName
                + " is not of required type " + DatasetType.INTERNAL);
    }
    return dataset;
}
when(metadataProvider.getConfig()).thenReturn(config); when(config.get(FunctionUtil.IMPORT_PRIVATE_FUNCTIONS)).thenReturn("true"); when(metadataProvider.findDataset(anyString(), anyString())).thenReturn(mock(Dataset.class));
JobSpecification spec) throws AlgebricksException { String datasetName = dataSource.getId().getDatasourceName(); Dataset dataset = findDataset(dataSource.getId().getDataverseName(), datasetName); if (dataset == null) { throw new AlgebricksException(
/**
 * Builds a {@link StorageComponentsDatasource} from the function's two string
 * arguments (dataverse name, dataset name).
 *
 * @param context the optimization context supplying the metadata provider
 * @param f       the function call carrying the name arguments
 * @return a datasource over the resolved dataset's storage components
 * @throws AlgebricksException if the dataset cannot be found
 */
@Override
public StorageComponentsDatasource toDatasource(IOptimizationContext context, AbstractFunctionCallExpression f)
        throws AlgebricksException {
    String dvName = getString(f.getArguments(), 0);
    String dsName = getString(f.getArguments(), 1);
    MetadataProvider mp = (MetadataProvider) context.getMetadataProvider();
    Dataset resolved = mp.findDataset(dvName, dsName);
    if (resolved == null) {
        throw new CompilationException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, f.getSourceLocation(),
                dsName, dvName);
    }
    return new StorageComponentsDatasource(context.getComputationNodeDomain(), resolved.getDatasetId());
}
}
/**
 * Builds a {@link DatasetResourcesDatasource} from the function's two string
 * arguments (dataverse name, dataset name).
 *
 * @param context the optimization context supplying the metadata provider
 * @param f       the function call carrying the name arguments
 * @return a datasource over the resolved dataset's resources
 * @throws AlgebricksException if the dataset cannot be found
 */
@Override
public DatasetResourcesDatasource toDatasource(IOptimizationContext context, AbstractFunctionCallExpression f)
        throws AlgebricksException {
    String dvName = getString(f.getArguments(), 0);
    String dsName = getString(f.getArguments(), 1);
    MetadataProvider mp = (MetadataProvider) context.getMetadataProvider();
    Dataset resolved = mp.findDataset(dvName, dsName);
    if (resolved == null) {
        throw new CompilationException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, f.getSourceLocation(),
                dsName, dvName);
    }
    return new DatasetResourcesDatasource(context.getComputationNodeDomain(), resolved.getDatasetId());
}
}
throw new AlgebricksException("Unspecified dataverse!"); Dataset dataset = metadata.findDataset(dataverseName, datasetName); if (dataset == null) { throw new AlgebricksException("Could not find dataset " + datasetName + " in dataverse " + dataverseName);
"Unexpected function for Unnest Map: " + fid); return ((MetadataProvider) context.getMetadataProvider()).findDataset(dataverseName, datasetName);
.findDataset(originalBTreeParameters.getDataverseName(), originalBTreeParameters.getDatasetName());
/**
 * Builds a Hyracks job specification that compacts the LSM components of the
 * given dataset's primary index.
 *
 * @param dataverse        dataverse containing the dataset
 * @param datasetName      name of the dataset to compact
 * @param metadataProvider provider used for metadata lookup and split/constraint resolution
 * @return the assembled compaction job specification
 * @throws AlgebricksException ({@code AsterixException}) if the dataset does not exist
 */
public static JobSpecification compactDatasetJobSpec(Dataverse dataverse, String datasetName,
        MetadataProvider metadataProvider) throws AlgebricksException {
    String dataverseName = dataverse.getDataverseName();
    Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
    if (dataset == null) {
        throw new AsterixException("Could not find dataset " + datasetName + " in dataverse " + dataverseName);
    }
    JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
    Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint =
            metadataProvider.getSplitProviderAndConstraints(dataset);
    IIndexDataflowHelperFactory indexHelperFactory = new IndexDataflowHelperFactory(
            metadataProvider.getStorageComponentProvider().getStorageManager(), splitsAndConstraint.first);
    LSMTreeIndexCompactOperatorDescriptor compactOp =
            new LSMTreeIndexCompactOperatorDescriptor(spec, indexHelperFactory);
    // Set the partition constraint exactly once (the original called this twice
    // with identical arguments — a copy/paste duplicate).
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, compactOp,
            splitsAndConstraint.second);
    spec.addRoot(compactOp);
    return spec;
}
/**
 * Resolves and validates the target dataset for output, rejecting unknown and
 * external datasets, and wraps it in a {@link DatasetDataSource}.
 *
 * @param metadataProvider provider used for all metadata lookups
 * @param dataverseName    dataverse containing the dataset
 * @param datasetName      name of the target dataset
 * @param sourceLoc        source location reported on failure
 * @return a datasource describing the internal dataset
 * @throws AlgebricksException if the dataset is unknown or external
 */
private DatasetDataSource validateDatasetInfo(MetadataProvider metadataProvider, String dataverseName,
        String datasetName, SourceLocation sourceLoc) throws AlgebricksException {
    Dataset target = metadataProvider.findDataset(dataverseName, datasetName);
    if (target == null) {
        throw new CompilationException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, sourceLoc, datasetName,
                dataverseName);
    }
    if (target.getDatasetType() == DatasetType.EXTERNAL) {
        // writing into an external dataset is not supported
        throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
                "Cannot write output to an external dataset.");
    }
    DataSourceId sourceId = new DataSourceId(dataverseName, datasetName);
    IAType itemType = metadataProvider.findType(target.getItemTypeDataverseName(), target.getItemTypeName());
    IAType metaItemType =
            metadataProvider.findType(target.getMetaItemTypeDataverseName(), target.getMetaItemTypeName());
    INodeDomain domain = metadataProvider.findNodeDomain(target.getNodeGroupName());
    return new DatasetDataSource(sourceId, target, itemType, metaItemType, DataSource.Type.INTERNAL_DATASET,
            target.getDatasetDetails(), domain);
}
final Dataset dataset = metadataProvider.findDataset(null, datasetName); Assert.assertNotNull(dataset);
/**
 * Gets the reference of dataset {@code datasetName} from metadata, looked up in
 * the default dataverse.
 *
 * @param integrationUtil the running integration cluster whose CC application context is used
 * @param datasetName     name of the dataset to look up
 * @return the dataset reference if found. Otherwise null.
 * @throws AlgebricksException if the metadata lookup fails
 * @throws RemoteException     if the metadata transaction cannot be started or committed
 */
public static Dataset getDataset(AsterixHyracksIntegrationUtil integrationUtil, String datasetName)
        throws AlgebricksException, RemoteException {
    final ICcApplicationContext appCtx =
            (ICcApplicationContext) integrationUtil.getClusterControllerService().getApplicationContext();
    final MetadataProvider metadataProvider = new MetadataProvider(appCtx, null);
    final MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    Dataset dataset;
    try {
        dataset = metadataProvider.findDataset(MetadataBuiltinEntities.DEFAULT_DATAVERSE_NAME, datasetName);
    } finally {
        try {
            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        } finally {
            // Must run even if the commit throws; the original would have
            // leaked the locks in that case.
            metadataProvider.getLocks().unlock();
        }
    }
    return dataset;
}
Dataset dataset = mp.findDataset(jobGenParams.getDataverseName(), jobGenParams.getDatasetName()); IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(unnestMap); List<LogicalVariable> outputVars = unnestMap.getVariables();
private ARecordType getMetadataRecordType(String dataverseName, String datasetName) throws Exception { MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction(); // Retrieves file splits of the dataset. MetadataProvider metadataProvider = new MetadataProvider( (ICcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext(), null); try { metadataProvider.setMetadataTxnContext(mdTxnCtx); Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName); ARecordType recordType = (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName()); // Metadata transaction commits. MetadataManager.INSTANCE.commitTransaction(mdTxnCtx); return recordType; } finally { metadataProvider.getLocks().unlock(); } } }
Dataset dataset = metadataProvider.findDataset(jobGenParams.getDataverseName(), jobGenParams.getDatasetName()); int[] keyIndexes = getKeyIndexes(jobGenParams.getKeyVarList(), inputSchemas);
Dataset dataset = metadataProvider.findDataset(jobGenParams.getDataverseName(), jobGenParams.getDatasetName()); IVariableTypeEnvironment typeEnv = context.getTypeEnvironment(op); ITupleFilterFactory tupleFilterFactory = null;
FeedConnection fc = MetadataManager.INSTANCE.getFeedConnection(metadataProvider.getMetadataTxnContext(), dataverseName, feedName, datasetName); Dataset ds = metadataProvider.findDataset(dataverseName, datasetName); if (ds == null) { throw new CompilationException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, sourceLoc, datasetName,