@Override
protected int[] createFieldPermutationForBulkLoadOp() {
    if (isPointMBR) {
        int[] fieldPermutation =
                new int[NUM_TAG_FIELDS + numNestedSecondaryKeyFields + numPrimaryKeys + numFilterFields];
        int idx = 0;
        int numSecondaryKeyFieldsForPointMBR = numNestedSecondaryKeyFields / 2;
        //add the tag fields and the first half of the sk fields (the point itself)
        for (int i = 0; i < NUM_TAG_FIELDS + numSecondaryKeyFieldsForPointMBR; i++) {
            fieldPermutation[idx++] = i;
        }
        //add the rest of the sk fields for pointMBR
        for (int i = 0; i < numSecondaryKeyFieldsForPointMBR; i++) {
            fieldPermutation[idx++] = NUM_TAG_FIELDS + i;
        }
        //add the pk and filter fields
        int end = numSecondaryKeyFieldsForPointMBR + numPrimaryKeys + numFilterFields;
        for (int i = numSecondaryKeyFieldsForPointMBR; i < end; i++) {
            fieldPermutation[idx++] = NUM_TAG_FIELDS + i;
        }
        return fieldPermutation;
    } else {
        return super.createFieldPermutationForBulkLoadOp();
    }
}
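// A standalone sketch of the point-MBR permutation above, runnable in isolation.
// The field counts are illustrative assumptions, not values from the source
// (NUM_TAG_FIELDS = 2 reflects the correlated helpers' component-position and
// anti-matter tags): the input tuple carries only the point's coordinates, and
// the permutation repeats them so the R-tree bulk load sees a degenerate MBR
// whose min and max corners coincide.
public final class PointMbrPermutationDemo {
    public static void main(String[] args) {
        final int numTagFields = 2;
        final int numNestedSecondaryKeyFields = 4; // 2-d MBR: min corner + max corner
        final int numPrimaryKeys = 1;
        final int numFilterFields = 0;
        int[] perm = new int[numTagFields + numNestedSecondaryKeyFields + numPrimaryKeys + numFilterFields];
        int idx = 0;
        int half = numNestedSecondaryKeyFields / 2;
        for (int i = 0; i < numTagFields + half; i++) {
            perm[idx++] = i; // tags, then the point's own coordinates
        }
        for (int i = 0; i < half; i++) {
            perm[idx++] = numTagFields + i; // repeat the coordinates as the max corner
        }
        for (int i = half; i < half + numPrimaryKeys + numFilterFields; i++) {
            perm[idx++] = numTagFields + i; // primary key (and filter) fields follow
        }
        System.out.println(java.util.Arrays.toString(perm)); // [0, 1, 2, 3, 2, 3, 4]
    }
}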
protected int[] createFieldPermutationForBulkLoadOp() {
    int[] fieldPermutation = new int[NUM_TAG_FIELDS + getNumSecondaryKeys() + numPrimaryKeys + numFilterFields];
    for (int i = 0; i < fieldPermutation.length; i++) {
        fieldPermutation[i] = i;
    }
    return fieldPermutation;
}
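// The base case above is just the identity permutation: every incoming field
// keeps its position. A one-line sketch under illustrative field counts
// (2 tags + 2 secondary keys + 1 primary key + 0 filters):
int[] identity = java.util.stream.IntStream.range(0, 2 + 2 + 1 + 0).toArray(); // [0, 1, 2, 3, 4]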
@Override
protected AlgebricksMetaOperatorDescriptor createCastOp(JobSpecification spec, DatasetType dsType,
        boolean strictCast) throws AlgebricksException {
    int[] outColumns = new int[1];
    // tags(2) + primary keys + record + meta part(?)
    int[] projectionList = new int[NUM_TAG_FIELDS + (dataset.hasMetaPart() ? 2 : 1) + numPrimaryKeys];
    int recordIdx = NUM_TAG_FIELDS + numPrimaryKeys;
    //here we only consider internal dataset
    assert dsType == DatasetType.INTERNAL;
    outColumns[0] = NUM_TAG_FIELDS + numPrimaryKeys;
    int projCount = 0;
    for (int i = 0; i < NUM_TAG_FIELDS; i++) {
        projectionList[projCount++] = i;
    }
    //set the primary keys and the record
    for (int i = 0; i <= numPrimaryKeys; i++) {
        projectionList[projCount++] = NUM_TAG_FIELDS + i;
    }
    if (dataset.hasMetaPart()) {
        projectionList[NUM_TAG_FIELDS + numPrimaryKeys + 1] = NUM_TAG_FIELDS + numPrimaryKeys + 1;
    }
    IScalarEvaluatorFactory[] castEvalFact =
            new IScalarEvaluatorFactory[] { new ColumnAccessEvalFactory(recordIdx) };
    IScalarEvaluatorFactory[] sefs = new IScalarEvaluatorFactory[1];
    sefs[0] = createCastFunction(strictCast).createEvaluatorFactory(castEvalFact);
    AssignRuntimeFactory castAssign = new AssignRuntimeFactory(outColumns, sefs, projectionList);
    castAssign.setSourceLocation(sourceLoc);
    return new AlgebricksMetaOperatorDescriptor(spec, 1, 1, new IPushRuntimeFactory[] { castAssign },
            new RecordDescriptor[] { getTaggedRecordDescriptor(enforcedRecDesc) });
}
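// Standalone sketch of the projection bookkeeping above, runnable in isolation,
// under illustrative assumptions (NUM_TAG_FIELDS = 2, one primary key, no meta
// part). Because outColumns[0] equals the record's own index, the cast result
// overwrites the record column in place and the projection is the identity:
public final class CastProjectionDemo {
    public static void main(String[] args) {
        final int numTagFields = 2;
        final int numPrimaryKeys = 1;
        final boolean hasMetaPart = false;
        int[] projectionList = new int[numTagFields + (hasMetaPart ? 2 : 1) + numPrimaryKeys];
        int projCount = 0;
        for (int i = 0; i < numTagFields; i++) {
            projectionList[projCount++] = i; // the tag columns
        }
        for (int i = 0; i <= numPrimaryKeys; i++) {
            projectionList[projCount++] = numTagFields + i; // primary key(s), then the record
        }
        int recordIdx = numTagFields + numPrimaryKeys; // also the single output column
        System.out.println(java.util.Arrays.toString(projectionList) + ", record at " + recordIdx);
        // prints: [0, 1, 2, 3], record at 3
    }
}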
public static JobSpecification buildSecondaryIndexLoadingJobSpec(Dataset dataset, Index index,
        MetadataProvider metadataProvider, List<ExternalFile> files, SourceLocation sourceLoc)
        throws AlgebricksException {
    SecondaryIndexOperationsHelper secondaryIndexHelper;
    if (dataset.isCorrelated()) {
        secondaryIndexHelper = SecondaryCorrelatedTreeIndexOperationsHelper.createIndexOperationsHelper(dataset,
                index, metadataProvider, sourceLoc);
    } else {
        secondaryIndexHelper = SecondaryTreeIndexOperationsHelper.createIndexOperationsHelper(dataset, index,
                metadataProvider, sourceLoc);
    }
    if (files != null) {
        secondaryIndexHelper.setExternalFiles(files);
    }
    return secondaryIndexHelper.buildLoadingJobSpec();
}
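// Hedged usage sketch: how a caller might run the job returned by the method
// above. `dataset`, `index`, `metadataProvider`, `sourceLoc`, and `hcc` (an
// IHyracksClientConnection) are assumed to be available from the caller's
// context; internal datasets pass null for the external-files list.
JobSpecification loadJob =
        buildSecondaryIndexLoadingJobSpec(dataset, index, metadataProvider, null, sourceLoc);
JobId jobId = hcc.startJob(loadJob); // standard Hyracks client API
hcc.waitForCompletion(jobId);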