/** Number of secondary key fields declared on this index. */
@Override
protected int getNumSecondaryKeys() {
    final int numSecondaryKeys = index.getKeyFieldNames().size();
    return numSecondaryKeys;
}
/** Number of secondary key fields declared on this index. */
@Override
protected int getNumSecondaryKeys() {
    final int keyCount = index.getKeyFieldNames().size();
    return keyCount;
}
/**
 * Computes the BTree field indexes of the given index when the dataset has a filter.
 *
 * @param dataset the dataset owning the index
 * @param index the index to compute BTree fields for
 * @return for a primary index, the dataset's filtered BTree fields; for a secondary
 *         index, the identity mapping over all secondary + primary key positions
 * @throws AlgebricksException if the dataset utility fails
 */
public static int[] getBtreeFieldsIfFiltered(Dataset dataset, Index index) throws AlgebricksException {
    if (index.isPrimaryIndex()) {
        return DatasetUtil.createBTreeFieldsWhenThereisAFilter(dataset);
    }
    // Secondary index: fields are simply positions 0..(numSecondaryKeys + numPrimaryKeys - 1).
    final int fieldCount = index.getKeyFieldNames().size() + dataset.getPrimaryKeys().size();
    final int[] btreeFields = new int[fieldCount];
    for (int pos = 0; pos < fieldCount; pos++) {
        btreeFields[pos] = pos;
    }
    return btreeFields;
}
/**
 * Resolves the type of each BTree index key field.
 *
 * @param index the index whose key field types are resolved
 * @param recordType the main record type
 * @param metaRecordType the auxiliary meta record type
 * @return one {@link IAType} per index key field, in key order
 * @throws AlgebricksException if a key field's type cannot be resolved
 */
public static List<IAType> getBTreeIndexKeyTypes(Index index, ARecordType recordType, ARecordType metaRecordType)
        throws AlgebricksException {
    List<Integer> keySourceIndicators = index.getKeyFieldSourceIndicators();
    List<IAType> resolvedTypes = new ArrayList<>();
    int keyCount = index.getKeyFieldNames().size();
    for (int k = 0; k < keyCount; k++) {
        // Each key may come from the main record or the meta record, per its source indicator.
        ARecordType sourceType = chooseSource(keySourceIndicators, k, recordType, metaRecordType);
        Pair<IAType, Boolean> keyPairType = Index.getNonNullableOpenFieldType(index.getKeyFieldTypes().get(k),
                index.getKeyFieldNames().get(k), sourceType);
        resolvedTypes.add(keyPairType.first);
    }
    return resolvedTypes;
}
/**
 * Determines which field positions feed the bloom filter for the given index.
 *
 * @param dataset the dataset owning the index
 * @param index the index to compute bloom filter fields for
 * @return the bloom filter field positions
 * @throws AlgebricksException on metadata access failure
 */
private static int[] getBloomFilterFields(Dataset dataset, Index index) throws AlgebricksException {
    if (index.isPrimaryIndex()) {
        return dataset.getPrimaryBloomFilterFields();
    }
    if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
        // External datasets: the files index has a fixed layout; other indexes use the
        // single position right after the secondary keys.
        boolean isFilesIndex =
                index.getIndexName().equals(IndexingConstants.getFilesIndexName(dataset.getDatasetName()));
        if (isFilesIndex) {
            return FilesIndexDescription.BLOOM_FILTER_FIELDS;
        }
        return new int[] { index.getKeyFieldNames().size() };
    }
    // Internal secondary index: bloom filter covers all secondary key fields, 0..numKeys-1.
    final int numKeys = index.getKeyFieldNames().size();
    final int[] bloomFilterKeyFields = new int[numKeys];
    for (int k = 0; k < numKeys; k++) {
        bloomFilterKeyFields[k] = k;
    }
    return bloomFilterKeyFields;
}
}
/**
 * Builds the binary tokenizer operator for this (non-tagged) secondary index load path.
 * The document field sits at position 0; primary key (and optional filter) fields
 * follow the secondary key fields in the input tuple.
 */
private AbstractOperatorDescriptor createTokenizerOp(JobSpecification spec) {
    final int docField = 0;
    final int numSecondaryKeys = index.getKeyFieldNames().size();
    final int[] pkAndFilterFields = new int[numPrimaryKeys + numFilterFields];
    int pos = 0;
    while (pos < pkAndFilterFields.length) {
        // Shift past the secondary keys to reach the primary key / filter fields.
        pkAndFilterFields[pos] = numSecondaryKeys + pos;
        pos++;
    }
    BinaryTokenizerOperatorDescriptor tokenizerOp =
            new BinaryTokenizerOperatorDescriptor(spec, tokenKeyPairRecDesc, tokenizerFactory, docField,
                    pkAndFilterFields, isPartitioned, false, false, MissingWriterFactory.INSTANCE);
    tokenizerOp.setSourceLocation(sourceLoc);
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, tokenizerOp,
            primaryPartitionConstraint);
    return tokenizerOp;
}
private AbstractOperatorDescriptor createTokenizerOp(JobSpecification spec) throws AlgebricksException { int docField = NUM_TAG_FIELDS; int numSecondaryKeys = index.getKeyFieldNames().size(); int[] keyFields = new int[NUM_TAG_FIELDS + numPrimaryKeys + numFilterFields]; // set tag fields for (int i = 0; i < NUM_TAG_FIELDS; i++) { keyFields[i] = i; } // set primary key + filter fields for (int i = NUM_TAG_FIELDS; i < keyFields.length; i++) { keyFields[i] = i + numSecondaryKeys; } BinaryTokenizerOperatorDescriptor tokenizerOp = new BinaryTokenizerOperatorDescriptor(spec, getTaggedRecordDescriptor(tokenKeyPairRecDesc), tokenizerFactory, docField, keyFields, isPartitioned, false, true, MissingWriterFactory.INSTANCE); AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, tokenizerOp, primaryPartitionConstraint); return tokenizerOp; }
// Candidate check: the field must be one of this index's keys and the index must not
// have a pending metadata operation.
if (index.getKeyFieldNames().contains(fieldName) && index.getPendingOp() == MetadataUtil.PENDING_NO_OP) {
    indexCandidates.add(index);
    // A field whose static type is MISSING or ANY could not be resolved from the record type.
    boolean isFieldTypeUnknown = fieldType == BuiltinType.AMISSING || fieldType == BuiltinType.ANY;
    if (isFieldTypeUnknown && (!index.isOverridingKeyFieldTypes() || index.isEnforced())) {
        // Fall back to the declared key type at the matching key position of the index.
        // NOTE(review): excerpt is cut off here; enclosing method and closing braces not visible.
        IAType indexedType = index.getKeyFieldTypes().get(index.getKeyFieldNames().indexOf(fieldName));
        optFuncExpr.setFieldType(varIdx, indexedType);
/**
 * Initializes the filter type traits/comparators and the primary/secondary BTree
 * field mappings for this filtered index.
 * Assumes numFilterFields >= 1 (slot 0 is written unconditionally).
 *
 * @throws AlgebricksException if the filter field's type cannot be resolved
 */
private void setFilterTypeTraitsAndComparators() throws AlgebricksException {
    filterTypeTraits = new ITypeTraits[numFilterFields];
    filterCmpFactories = new IBinaryComparatorFactory[numFilterFields];
    secondaryFilterFields = new int[numFilterFields];
    primaryFilterFields = new int[numFilterFields];
    // Primary BTree fields: all primary keys plus one extra slot; identity mapping.
    primaryBTreeFields = new int[numPrimaryKeys + 1];
    for (int f = 0; f < primaryBTreeFields.length; f++) {
        primaryBTreeFields[f] = f;
    }
    // Secondary BTree fields: all secondary keys followed by the primary keys; identity mapping.
    secondaryBTreeFields = new int[index.getKeyFieldNames().size() + numPrimaryKeys];
    for (int f = 0; f < secondaryBTreeFields.length; f++) {
        secondaryBTreeFields[f] = f;
    }
    IAType filterType = itemType.getSubFieldType(filterFieldName);
    filterCmpFactories[0] = BinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(filterType, true);
    filterTypeTraits[0] = TypeTraitProvider.INSTANCE.getTypeTrait(filterType);
    // The filter value sits after all key fields in each tuple.
    secondaryFilterFields[0] = getNumSecondaryKeys() + numPrimaryKeys;
    primaryFilterFields[0] = numPrimaryKeys + 1;
}
/**
 * Computes the filter field positions within a secondary index tuple.
 *
 * @param dataset the dataset owning the index
 * @param index the secondary index
 * @param filterTypeTraits the filter type traits, or null when the dataset has no filter
 * @return for a BTree, the single position right after all key fields; empty otherwise
 * @throws CompilationException on an unknown index type
 */
private static int[] secondaryFilterFields(Dataset dataset, Index index, ITypeTraits[] filterTypeTraits)
        throws CompilationException {
    if (filterTypeTraits == null) {
        return empty;
    }
    switch (index.getIndexType()) {
        case BTREE:
            // The filter value follows all primary + secondary key fields.
            return new int[] { dataset.getPrimaryKeys().size() + index.getKeyFieldNames().size() };
        case RTREE:
        case LENGTH_PARTITIONED_NGRAM_INVIX:
        case LENGTH_PARTITIONED_WORD_INVIX:
        case SINGLE_PARTITION_NGRAM_INVIX:
        case SINGLE_PARTITION_WORD_INVIX:
            // These index types carry no secondary filter fields.
            return empty;
        default:
            throw new CompilationException(ErrorCode.COMPILATION_UNKNOWN_INDEX_TYPE,
                    index.getIndexType().toString());
    }
}
/**
 * Resolves the types of the RTree index key fields.
 * An RTree key decomposes a single spatial field into several keys of the same
 * nested spatial type, one per dimension boundary.
 *
 * @param index the index to consider
 * @param recordType the main record type
 * @param metaRecordType the auxiliary meta record type
 * @return one {@link IAType} per RTree key field (all the same nested spatial type)
 * @throws AlgebricksException if the spatial key field's type cannot be resolved
 */
public static List<IAType> getRTreeIndexKeyTypes(Index index, ARecordType recordType, ARecordType metaRecordType)
        throws AlgebricksException {
    List<Integer> keySourceIndicators = index.getKeyFieldSourceIndicators();
    ARecordType sourceRecType = chooseSource(keySourceIndicators, 0, recordType, metaRecordType);
    Pair<IAType, Boolean> keyPairType = Index.getNonNullableOpenFieldType(index.getKeyFieldTypes().get(0),
            index.getKeyFieldNames().get(0), sourceRecType);
    IAType nestedKeyType = NonTaggedFormatUtil.getNestedSpatialType(keyPairType.first.getTypeTag());
    int numKeys = KeyFieldTypeUtil.getNumSecondaryKeys(index, sourceRecType, metaRecordType);
    List<IAType> indexKeyTypes = new ArrayList<>(numKeys);
    for (int k = 0; k < numKeys; k++) {
        indexKeyTypes.add(nestedKeyType);
    }
    return indexKeyTypes;
}
// Inverted (word/ngram) indexes: one secondary key per declared key field.
// NOTE(review): excerpt from inside a switch; the enclosing method is not visible.
case LENGTH_PARTITIONED_WORD_INVIX:
case LENGTH_PARTITIONED_NGRAM_INVIX:
    return index.getKeyFieldNames().size();
case RTREE:
    // RTree: resolve the single spatial key field's type from the proper record source.
    Pair<IAType, Boolean> keyPairType = Index.getNonNullableOpenFieldType(index.getKeyFieldTypes().get(0),
            index.getKeyFieldNames().get(0), chooseSource(keySourceIndicators, 0, recordType, metaRecordType));
    IAType keyType = keyPairType.first;
int numSecondaryKeys = index.getKeyFieldNames().size();
ITypeTraitProvider typeTraitProvider = metadataProvider.getStorageComponentProvider().getTypeTraitProvider();
// Type traits cover the secondary keys followed by the primary keys.
ITypeTraits[] secondaryTypeTraits = new ITypeTraits[numSecondaryKeys + numPrimaryKeys];
// NOTE(review): excerpt is cut mid-statement below; the enclosing call/loop is not visible.
        index.getKeyFieldNames().get(i), sourceType);
IAType keyType = keyTypePair.first;
secondaryTypeTraits[i] = typeTraitProvider.getTypeTrait(keyType);
// Builds the tokenizer factory for an inverted index.
// NOTE(review): excerpt; the method body below is cut mid-statement and incomplete.
private static IBinaryTokenizerFactory getTokenizerFactory(Dataset dataset, Index index, ARecordType recordType,
        ARecordType metaType) throws AlgebricksException {
    int numPrimaryKeys = dataset.getPrimaryKeys().size();
    int numSecondaryKeys = index.getKeyFieldNames().size();
    IndexType indexType = index.getIndexType();
    // NOTE(review): the enclosing call for the fragment below is not visible in this excerpt.
            index.getKeyFieldNames().get(0), sourceType);
    IAType secondaryKeyType = keyTypePair.first;
int numSecondaryKeys = index.getKeyFieldNames().size();
IBinaryComparatorFactoryProvider cmpFactoryProvider =
        metadataProvider.getStorageComponentProvider().getComparatorFactoryProvider();
// NOTE(review): excerpt is cut mid-statement below; the enclosing call/loop is not visible.
        index.getKeyFieldNames().get(i), sourceType);
IAType keyType = keyTypePair.first;
// Second argument is true — presumably ascending order; TODO confirm against provider API.
secondaryCmpFactories[i] = cmpFactoryProvider.getBinaryComparatorFactory(keyType, true);
/**
 * Determines which field positions feed the bloom filter for the given index.
 *
 * @param dataset the dataset owning the index
 * @param index the index to compute bloom filter fields for
 * @return the bloom filter field positions
 * @throws AlgebricksException on metadata access failure
 */
private static int[] getBloomFilterFields(Dataset dataset, Index index) throws AlgebricksException {
    if (index.isPrimaryIndex()) {
        return dataset.getPrimaryBloomFilterFields();
    }
    if (dataset.getDatasetType() == DatasetType.EXTERNAL) {
        // External datasets: the files index has a fixed layout; any other index uses
        // the single position right after its secondary keys.
        if (index.getIndexName().equals(IndexingConstants.getFilesIndexName(dataset.getDatasetName()))) {
            return FilesIndexDescription.BLOOM_FILTER_FIELDS;
        }
        return new int[] { index.getKeyFieldNames().size() };
    }
    // Internal secondary index: identity mapping over all secondary key fields.
    final int keyCount = index.getKeyFieldNames().size();
    final int[] fields = new int[keyCount];
    for (int pos = 0; pos < keyCount; pos++) {
        fields[pos] = pos;
    }
    return fields;
}
}
IBinaryComparatorFactoryProvider cmpFactoryProvider =
        metadataProvider.getStorageComponentProvider().getComparatorFactoryProvider();
List<List<String>> secondaryKeyFields = index.getKeyFieldNames();
int numSecondaryKeys = secondaryKeyFields.size();
// This code path supports exactly one secondary key field.
// NOTE(review): excerpt is cut off inside this if; its body is not visible.
if (numSecondaryKeys != 1) {
int numSecondaryKeys = index.getKeyFieldNames().size(); ITypeTraitProvider typeTraitProvider = metadataProvider.getStorageComponentProvider().getTypeTraitProvider(); ITypeTraits[] secondaryTypeTraits = new ITypeTraits[numSecondaryKeys + numPrimaryKeys]; index.getKeyFieldNames().get(i), sourceType); IAType keyType = keyTypePair.first; secondaryTypeTraits[i] = typeTraitProvider.getTypeTrait(keyType);
/**
 * Assembles the test-side description of a secondary index: file splits, type traits,
 * serdes, record descriptor, and the field permutations used for inserts.
 *
 * @param primaryIndexInfo the already-built primary index description
 * @param secondaryIndex the secondary index metadata entity
 */
public SecondaryIndexInfo(PrimaryIndexInfo primaryIndexInfo, Index secondaryIndex) {
    this.primaryIndexInfo = primaryIndexInfo;
    this.secondaryIndex = secondaryIndex;
    List<String> nodes = Collections.singletonList(ExecutionTestUtil.integrationUtil.ncs[0].getId());
    CcApplicationContext appCtx =
            (CcApplicationContext) ExecutionTestUtil.integrationUtil.cc.getApplicationContext();
    FileSplit[] splits = SplitsAndConstraintsUtil.getIndexSplits(appCtx.getClusterStateManager(),
            primaryIndexInfo.dataset, secondaryIndex.getIndexName(), nodes);
    fileSplitProvider = new ConstantFileSplitProvider(splits);
    // Materialize the secondary key types once; both traits and serdes need them.
    IAType[] secondaryKeyTypes =
            secondaryIndex.getKeyFieldTypes().toArray(new IAType[secondaryIndex.getKeyFieldTypes().size()]);
    secondaryIndexTypeTraits = createSecondaryIndexTypeTraits(primaryIndexInfo.recordType,
            primaryIndexInfo.metaType, primaryIndexInfo.primaryKeyTypes, secondaryKeyTypes);
    secondaryIndexSerdes = createSecondaryIndexSerdes(primaryIndexInfo.recordType, primaryIndexInfo.metaType,
            primaryIndexInfo.primaryKeyTypes, secondaryKeyTypes);
    rDesc = new RecordDescriptor(secondaryIndexSerdes, secondaryIndexTypeTraits);
    // Inserts use the identity permutation over all secondary index fields.
    insertFieldsPermutations = new int[secondaryIndexTypeTraits.length];
    for (int pos = 0; pos < insertFieldsPermutations.length; pos++) {
        insertFieldsPermutations[pos] = pos;
    }
    // Primary key positions sit after the secondary key fields in the index tuple.
    int numSecondaryKeys = secondaryIndex.getKeyFieldNames().size();
    primaryKeyIndexes = new int[primaryIndexInfo.primaryKeyIndexes.length];
    for (int pos = 0; pos < primaryKeyIndexes.length; pos++) {
        primaryKeyIndexes[pos] = pos + numSecondaryKeys;
    }
}
// NOTE(review): excerpt is cut off mid-scope; the enclosing method and the end of
// the loop below are not visible.
IIndexDataflowHelperFactory primaryIndexHelperFactory = new IndexDataflowHelperFactory(
        storageComponentProvider.getStorageManager(), primaryIndexInfo.getFileSplitProvider());
// Identity permutation over the secondary index key fields.
int[] fieldPermutation = new int[secondaryIndex.getKeyFieldNames().size()];
for (int i = 0; i < fieldPermutation.length; i++) {
    fieldPermutation[i] = i;