public IAType findMetaType(Dataset dataset) throws AlgebricksException {
    return findType(dataset.getMetaItemTypeDataverseName(), dataset.getMetaItemTypeName());
}
public IAType findType(Dataset dataset) throws AlgebricksException {
    return findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
}
public static ARecordType getMetaType(MetadataProvider metadataProvider, Dataset dataset)
        throws AlgebricksException {
    if (dataset.hasMetaPart()) {
        return (ARecordType) metadataProvider.findType(dataset.getMetaItemTypeDataverseName(),
                dataset.getMetaItemTypeName());
    }
    return null;
}
/**
 * Gets the hash function factories for the primary key fields of this dataset.
 *
 * @param metadataProvider
 *            the metadata provider.
 * @return the hash function factories for the primary key fields of this dataset.
 * @throws AlgebricksException
 */
public IBinaryHashFunctionFactory[] getPrimaryHashFunctionFactories(MetadataProvider metadataProvider)
        throws AlgebricksException {
    ARecordType recordType = (ARecordType) metadataProvider.findType(this);
    ARecordType metaType = (ARecordType) metadataProvider.findMetaType(this);
    List<List<String>> partitioningKeys = getPrimaryKeys();
    int numPrimaryKeys = partitioningKeys.size();
    IBinaryHashFunctionFactory[] hashFuncFactories = new IBinaryHashFunctionFactory[numPrimaryKeys];
    List<Integer> indicators = null;
    if (hasMetaPart()) {
        indicators = ((InternalDatasetDetails) getDatasetDetails()).getKeySourceIndicator();
    }
    for (int i = 0; i < numPrimaryKeys; i++) {
        IAType keyType = (indicators == null || indicators.get(i) == 0)
                ? recordType.getSubFieldType(partitioningKeys.get(i))
                : metaType.getSubFieldType(partitioningKeys.get(i));
        hashFuncFactories[i] = BinaryHashFunctionFactoryProvider.INSTANCE.getBinaryHashFunctionFactory(keyType);
    }
    return hashFuncFactories;
}
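// A minimal usage sketch (hypothetical driver code, not part of the excerpt above):
// given a Dataset resolved through a MetadataProvider bound to an active metadata
// transaction, obtain one hash function factory per primary key field. Each key's
// type is resolved from the record type, or from the meta type when the key's
// source indicator is 1.
IBinaryHashFunctionFactory[] factories = dataset.getPrimaryHashFunctionFactories(metadataProvider);
for (int i = 0; i < factories.length; i++) {
    // createBinaryHashFunction() is the standard Hyracks factory method; the
    // resulting function hashes the serialized bytes of the i-th primary key field.
    IBinaryHashFunction hashFunction = factories[i].createBinaryHashFunction();
}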
protected SecondaryIndexOperationsHelper(Dataset dataset, Index index, MetadataProvider metadataProvider,
        SourceLocation sourceLoc) throws AlgebricksException {
    this.dataset = dataset;
    this.index = index;
    this.metadataProvider = metadataProvider;
    this.itemType =
            (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
    this.metaType = DatasetUtil.getMetaType(metadataProvider, dataset);
    Pair<ARecordType, ARecordType> enforcedTypes = getEnforcedType(index, itemType, metaType);
    this.enforcedItemType = enforcedTypes.first;
    this.enforcedMetaType = enforcedTypes.second;
    this.sourceLoc = sourceLoc;
    this.sortNumFrames = getSortNumFrames(metadataProvider, sourceLoc);
}
ARecordType recordType =
        (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
List<List<String>> primaryKeys = dataset.getPrimaryKeys();
        List<LogicalVariable> pkVars) throws AlgebricksException {
    Dataset dataset = metadataProvider.findDataset(aqlId.getDataverseName(), targetDataset);
    ARecordType feedOutputType = (ARecordType) metadataProvider.findType(aqlId.getDataverseName(), outputType);
    Feed sourceFeed = metadataProvider.findFeed(aqlId.getDataverseName(), sourceFeedName);
    FeedConnection feedConnection =

metaTypeName = metaTypeName.substring(metaTypeName.indexOf('.') + 1);
metaType = (ARecordType) metadataProvider.findType(dataverseName, metaTypeName);
private DatasetDataSource validateDatasetInfo(MetadataProvider metadataProvider, String dataverseName,
        String datasetName, SourceLocation sourceLoc) throws AlgebricksException {
    Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
    if (dataset == null) {
        throw new CompilationException(ErrorCode.UNKNOWN_DATASET_IN_DATAVERSE, sourceLoc, datasetName,
                dataverseName);
    }
    if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
        throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
                "Cannot write output to an external dataset.");
    }
    DataSourceId sourceId = new DataSourceId(dataverseName, datasetName);
    IAType itemType = metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
    IAType metaItemType =
            metadataProvider.findType(dataset.getMetaItemTypeDataverseName(), dataset.getMetaItemTypeName());
    INodeDomain domain = metadataProvider.findNodeDomain(dataset.getNodeGroupName());
    return new DatasetDataSource(sourceId, dataset, itemType, metaItemType, DataSource.Type.INTERNAL_DATASET,
            dataset.getDatasetDetails(), domain);
}
public static JobSpecification createDatasetJobSpec(Dataset dataset, MetadataProvider metadataProvider)
        throws AlgebricksException {
    Index index = IndexUtil.getPrimaryIndex(dataset);
    ARecordType itemType = (ARecordType) metadataProvider.findType(dataset);
throw new AlgebricksException("Unspecified dataverse!"); IAType t2 = metadata.findType(dataverseName, typeName); if (t2 == null) { throw new AlgebricksException("Unknown type " + typeName);
        .findType(dataset.getMetaItemTypeDataverseName(), dataset.getMetaItemTypeName());
ARecordType metaRecType = (ARecordType) metaItemType;
int numSecondaryKeys = KeyFieldTypeUtil.getNumSecondaryKeys(index, recType, metaRecType);
indicators = ((InternalDatasetDetails) getDatasetDetails()).getKeySourceIndicator();
ARecordType itemType = (ARecordType) metadataProvider.findType(this);
ARecordType metaType = (ARecordType) metadataProvider.findMetaType(this);
IAType t2 = metadata.findType(dataset.getItemTypeDataverseName(), tn);
if (t2 == null) {
    throw new AlgebricksException("No type for dataset " + datasetName);
filterFieldName = DatasetUtil.getFilterField(dataset);
IAType itemType = ((MetadataProvider) context.getMetadataProvider())
        .findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
if (itemType.getTypeTag() == ATypeTag.OBJECT) {
    recType = (ARecordType) itemType;
        originalBTreeParameters.getHighKeyVarList().size());
ARecordType recordType =
        (ARecordType) ((MetadataProvider) context.getMetadataProvider()).findType(dataset);
ARecordType metaRecordType =
        (ARecordType) ((MetadataProvider) context.getMetadataProvider()).findMetaType(dataset);
if (pos != null) {
    String tName = dataset.getItemTypeName();
    IAType t = mp.findType(dataset.getItemTypeDataverseName(), tName);
    if (t.getTypeTag() != ATypeTag.OBJECT) {
        return false;
byte[] failValueForIndexOnlyPlan = null;
if (isIndexOnlyPlan) {
    ARecordType recType =
            (ARecordType) findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
    List<List<String>> secondaryKeyFields = secondaryIndex.getKeyFieldNames();
    List<IAType> secondaryKeyTypes = secondaryIndex.getKeyFieldTypes();
private ARecordType getMetadataRecordType(String dataverseName, String datasetName) throws Exception {
    MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
    // Retrieves file splits of the dataset.
    MetadataProvider metadataProvider = new MetadataProvider(
            (ICcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext(), null);
    try {
        metadataProvider.setMetadataTxnContext(mdTxnCtx);
        Dataset dataset = metadataProvider.findDataset(dataverseName, datasetName);
        ARecordType recordType = (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(),
                dataset.getItemTypeName());
        // Metadata transaction commits.
        MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
        return recordType;
    } finally {
        metadataProvider.getLocks().unlock();
    }
}
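// The test helper above follows the usual AsterixDB metadata-transaction pattern.
// A hedged sketch of that pattern in isolation: begin a transaction, bind it to the
// MetadataProvider, commit on success, and always release the acquired locks. The
// abortTransaction call on failure is an assumption modeled on other metadata callers.
MetadataTransactionContext mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
try {
    metadataProvider.setMetadataTxnContext(mdTxnCtx);
    // ... perform metadata reads such as findDataset/findType here ...
    MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
    MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
    throw e;
} finally {
    metadataProvider.getLocks().unlock();
}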
int numKeys = dataset.getPrimaryKeys().size();
int numFilterFields = DatasetUtil.getFilterField(dataset) == null ? 0 : 1;
ARecordType itemType = (ARecordType) metadataProvider.findType(dataset);
ARecordType metaItemType = (ARecordType) metadataProvider.findMetaType(dataset);
Index primaryIndex = metadataProvider.getIndex(dataset.getDataverseName(), dataset.getDatasetName(),
public static JobSpecification buildFilesIndexCreateJobSpec(Dataset dataset,
        List<ExternalFile> externalFilesSnapshot, MetadataProvider metadataProvider) throws AlgebricksException {
    IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider();
    JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext());
    Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo =
            DatasetUtil.getMergePolicyFactory(dataset, metadataProvider.getMetadataTxnContext());
    ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
    Map<String, String> mergePolicyProperties = compactionInfo.second;
    Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint = metadataProvider
            .getSplitProviderAndConstraints(dataset, IndexingConstants.getFilesIndexName(dataset.getDatasetName()));
    IFileSplitProvider secondaryFileSplitProvider = secondarySplitsAndConstraint.first;
    String fileIndexName = IndexingConstants.getFilesIndexName(dataset.getDatasetName());
    Index fileIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(),
            dataset.getDataverseName(), dataset.getDatasetName(), fileIndexName);
    ARecordType recordType = (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(),
            dataset.getItemTypeName());
    IResourceFactory resourceFactory = dataset.getResourceFactory(metadataProvider, fileIndex, recordType, null,
            mergePolicyFactory, mergePolicyProperties);
    IIndexBuilderFactory indexBuilderFactory = new IndexBuilderFactory(storageComponentProvider.getStorageManager(),
            secondaryFileSplitProvider, resourceFactory, true);
    IIndexDataflowHelperFactory dataflowHelperFactory = new IndexDataflowHelperFactory(
            storageComponentProvider.getStorageManager(), secondaryFileSplitProvider);
    ExternalFilesIndexCreateOperatorDescriptor externalFilesOp = new ExternalFilesIndexCreateOperatorDescriptor(spec,
            indexBuilderFactory, dataflowHelperFactory, externalFilesSnapshot);
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, externalFilesOp,
            secondarySplitsAndConstraint.second);
    spec.addRoot(externalFilesOp);
    spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
    return spec;
}
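// A hedged usage sketch: the JobSpecification built above is typically handed to a
// Hyracks client connection for execution. The variable hcc is an assumption here
// (an IHyracksClientConnection obtained elsewhere); startJob and waitForCompletion
// are standard IHyracksClientConnection calls.
JobSpecification spec = buildFilesIndexCreateJobSpec(dataset, externalFilesSnapshot, metadataProvider);
JobId jobId = hcc.startJob(spec);
hcc.waitForCompletion(jobId);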