public static JobSpecification buildSecondaryIndexCompactJobSpec(Dataset dataset, Index index,
        MetadataProvider metadataProvider, SourceLocation sourceLoc) throws AlgebricksException {
    SecondaryIndexOperationsHelper secondaryIndexHelper =
            SecondaryIndexOperationsHelper.createIndexOperationsHelper(dataset, index, metadataProvider, sourceLoc);
    return secondaryIndexHelper.buildCompactJobSpec();
}
public static JobSpecification buildSecondaryIndexCreationJobSpec(Dataset dataset, Index index,
        MetadataProvider metadataProvider, SourceLocation sourceLoc) throws AlgebricksException {
    SecondaryIndexOperationsHelper secondaryIndexHelper =
            SecondaryIndexOperationsHelper.createIndexOperationsHelper(dataset, index, metadataProvider, sourceLoc);
    return secondaryIndexHelper.buildCreationJobSpec();
}
public static JobSpecification buildDropIndexJobSpec(Index index, MetadataProvider metadataProvider, Dataset dataset,
        Set<DropOption> options, SourceLocation sourceLoc) throws AlgebricksException {
    SecondaryIndexOperationsHelper secondaryIndexHelper =
            SecondaryIndexOperationsHelper.createIndexOperationsHelper(dataset, index, metadataProvider, sourceLoc);
    return secondaryIndexHelper.buildDropJobSpec(options);
}
protected SecondaryIndexOperationsHelper(Dataset dataset, Index index, MetadataProvider metadataProvider,
        SourceLocation sourceLoc) throws AlgebricksException {
    this.dataset = dataset;
    this.index = index;
    this.metadataProvider = metadataProvider;
    this.itemType =
            (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName());
    this.metaType = DatasetUtil.getMetaType(metadataProvider, dataset);
    Pair<ARecordType, ARecordType> enforcedTypes = getEnforcedType(index, itemType, metaType);
    this.enforcedItemType = enforcedTypes.first;
    this.enforcedMetaType = enforcedTypes.second;
    this.sourceLoc = sourceLoc;
    this.sortNumFrames = getSortNumFrames(metadataProvider, sourceLoc);
}
public static JobSpecification buildSecondaryIndexLoadingJobSpec(Dataset dataset, Index index,
        MetadataProvider metadataProvider, List<ExternalFile> files, SourceLocation sourceLoc)
        throws AlgebricksException {
    SecondaryIndexOperationsHelper secondaryIndexHelper;
    // Correlated datasets need the correlated variant of the tree index operations helper.
    if (dataset.isCorrelated()) {
        secondaryIndexHelper = SecondaryCorrelatedTreeIndexOperationsHelper.createIndexOperationsHelper(dataset,
                index, metadataProvider, sourceLoc);
    } else {
        secondaryIndexHelper = SecondaryTreeIndexOperationsHelper.createIndexOperationsHelper(dataset, index,
                metadataProvider, sourceLoc);
    }
    if (files != null) {
        secondaryIndexHelper.setExternalFiles(files);
    }
    return secondaryIndexHelper.buildLoadingJobSpec();
}
public static SecondaryIndexOperationsHelper createIndexOperationsHelper(Dataset dataset, Index index,
        MetadataProvider metadataProvider, SourceLocation sourceLoc) throws AlgebricksException {
    SecondaryIndexOperationsHelper indexOperationsHelper;
    switch (index.getIndexType()) {
        case BTREE:
            indexOperationsHelper = new SecondaryBTreeOperationsHelper(dataset, index, metadataProvider, sourceLoc);
            break;
        case RTREE:
            indexOperationsHelper = new SecondaryRTreeOperationsHelper(dataset, index, metadataProvider, sourceLoc);
            break;
        case SINGLE_PARTITION_WORD_INVIX:
        case SINGLE_PARTITION_NGRAM_INVIX:
        case LENGTH_PARTITIONED_WORD_INVIX:
        case LENGTH_PARTITIONED_NGRAM_INVIX:
            indexOperationsHelper =
                    new SecondaryInvertedIndexOperationsHelper(dataset, index, metadataProvider, sourceLoc);
            break;
        default:
            throw new CompilationException(ErrorCode.COMPILATION_UNKNOWN_INDEX_TYPE, sourceLoc, index.getIndexType());
    }
    indexOperationsHelper.init();
    return indexOperationsHelper;
}
private void setFilterTypeTraitsAndComparators() throws AlgebricksException {
    filterTypeTraits = new ITypeTraits[numFilterFields];
    filterCmpFactories = new IBinaryComparatorFactory[numFilterFields];
    secondaryFilterFields = new int[numFilterFields];
    primaryFilterFields = new int[numFilterFields];
    // Primary BTree tuples consist of the primary keys followed by the record.
    primaryBTreeFields = new int[numPrimaryKeys + 1];
    // Secondary BTree tuples consist of the secondary keys followed by the primary keys.
    secondaryBTreeFields = new int[index.getKeyFieldNames().size() + numPrimaryKeys];
    for (int i = 0; i < primaryBTreeFields.length; i++) {
        primaryBTreeFields[i] = i;
    }
    for (int i = 0; i < secondaryBTreeFields.length; i++) {
        secondaryBTreeFields[i] = i;
    }
    IAType type = itemType.getSubFieldType(filterFieldName);
    filterCmpFactories[0] = BinaryComparatorFactoryProvider.INSTANCE.getBinaryComparatorFactory(type, true);
    filterTypeTraits[0] = TypeTraitProvider.INSTANCE.getTypeTrait(type);
    // The filter field comes after the secondary and primary keys in the secondary index, and after the
    // primary keys and the record in the primary index.
    secondaryFilterFields[0] = getNumSecondaryKeys() + numPrimaryKeys;
    primaryFilterFields[0] = numPrimaryKeys + 1;
}
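// Illustrative example (not part of the original source): with one primary key, one secondary key and one
// filter field, the offsets computed above work out as follows.
//   Secondary index tuple: [secondary key (0), primary key (1), filter (2)] -> secondaryFilterFields[0] == 2
//   Primary index tuple:   [primary key (0), record (1), filter (2)]        -> primaryFilterFields[0] == 2
//   primaryBTreeFields == {0, 1}, secondaryBTreeFields == {0, 1}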
protected AlgebricksMetaOperatorDescriptor createCastOp(JobSpecification spec, DatasetType dsType, boolean strictCast)
        throws AlgebricksException {
    int[] outColumns = new int[1];
    int[] projectionList = new int[(dataset.hasMetaPart() ? 2 : 1) + numPrimaryKeys];
    int recordIdx;
    // The external data-scan operator returns the record as the first field, instead of the last as in the
    // internal case.
    if (dsType == DatasetType.EXTERNAL) {
        recordIdx = 0;
        outColumns[0] = 0;
    } else {
        recordIdx = numPrimaryKeys;
        outColumns[0] = numPrimaryKeys;
    }
    for (int i = 0; i <= numPrimaryKeys; i++) {
        projectionList[i] = i;
    }
    if (dataset.hasMetaPart()) {
        projectionList[numPrimaryKeys + 1] = numPrimaryKeys + 1;
    }
    IScalarEvaluatorFactory[] castEvalFact =
            new IScalarEvaluatorFactory[] { new ColumnAccessEvalFactory(recordIdx) };
    IScalarEvaluatorFactory[] sefs = new IScalarEvaluatorFactory[1];
    sefs[0] = createCastFunction(strictCast).createEvaluatorFactory(castEvalFact);
    AssignRuntimeFactory castAssign = new AssignRuntimeFactory(outColumns, sefs, projectionList);
    castAssign.setSourceLocation(sourceLoc);
    return new AlgebricksMetaOperatorDescriptor(spec, 1, 1, new IPushRuntimeFactory[] { castAssign },
            new RecordDescriptor[] { enforcedRecDesc });
}
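// Illustrative example (not part of the original source): the input layouts assumed by createCastOp.
//   Internal dataset, two primary keys: [pk0 (0), pk1 (1), record (2), meta (3 if present)]
//       -> recordIdx == outColumns[0] == 2, projectionList == {0, 1, 2} (plus 3 when there is a meta part)
//   External dataset:                   [record (0), pk0 (1), ...]
//       -> recordIdx == outColumns[0] == 0
// In both cases the cast output replaces the record column and the projection keeps the primary keys,
// the (cast) record, and the meta field when the dataset has one.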
public static SecondaryIndexOperationsHelper createIndexOperationsHelper(Dataset dataset, Index index,
        MetadataProvider metadataProvider, SourceLocation sourceLoc) throws AlgebricksException {
    SecondaryIndexOperationsHelper indexOperationsHelper;
    switch (index.getIndexType()) {
        case BTREE:
            indexOperationsHelper =
                    new SecondaryCorrelatedBTreeOperationsHelper(dataset, index, metadataProvider, sourceLoc);
            break;
        case RTREE:
            indexOperationsHelper =
                    new SecondaryCorrelatedRTreeOperationsHelper(dataset, index, metadataProvider, sourceLoc);
            break;
        case SINGLE_PARTITION_WORD_INVIX:
        case SINGLE_PARTITION_NGRAM_INVIX:
        case LENGTH_PARTITIONED_WORD_INVIX:
        case LENGTH_PARTITIONED_NGRAM_INVIX:
            indexOperationsHelper =
                    new SecondaryCorrelatedInvertedIndexOperationsHelper(dataset, index, metadataProvider, sourceLoc);
            break;
        default:
            throw new CompilationException(ErrorCode.COMPILATION_UNKNOWN_INDEX_TYPE, sourceLoc, index.getIndexType());
    }
    indexOperationsHelper.init();
    return indexOperationsHelper;
}
public static JobSpecification buildDropIndexJobSpec(Index index, MetadataProvider metadataProvider, Dataset dataset,
        SourceLocation sourceLoc) throws AlgebricksException {
    SecondaryIndexOperationsHelper secondaryIndexHelper =
            SecondaryIndexOperationsHelper.createIndexOperationsHelper(dataset, index, metadataProvider, sourceLoc);
    return secondaryIndexHelper.buildDropJobSpec(EnumSet.noneOf(DropOption.class));
}
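// Illustrative sketch (not part of the original class): one possible way a caller could compose the builders
// above when creating and then bulk-loading a new secondary index. The method name and the Pair return type
// are assumptions for illustration; the body only uses the public builders shown above.
public static Pair<JobSpecification, JobSpecification> buildCreateAndLoadJobSpecs(Dataset dataset, Index index,
        MetadataProvider metadataProvider, SourceLocation sourceLoc) throws AlgebricksException {
    // First build the job that creates the empty secondary index on the cluster.
    JobSpecification createSpec = buildSecondaryIndexCreationJobSpec(dataset, index, metadataProvider, sourceLoc);
    // Then build the job that bulk-loads the index from the primary data; a null file list is accepted by
    // the loading builder above and simply skips setExternalFiles.
    JobSpecification loadSpec = buildSecondaryIndexLoadingJobSpec(dataset, index, metadataProvider, null, sourceLoc);
    return new Pair<>(createSpec, loadSpec);
}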