/**
 * Builds the binary tokenizer operator for this secondary index and pins it to the
 * primary index's partition constraint.
 *
 * @param spec the job specification the operator is added to
 * @return the configured tokenizer operator descriptor
 * @throws AlgebricksException if the tagged record descriptor cannot be produced
 */
private AbstractOperatorDescriptor createTokenizerOp(JobSpecification spec) throws AlgebricksException {
    // The document (record) field sits immediately after the tag fields in the input.
    int documentFieldIndex = NUM_TAG_FIELDS;
    int secondaryKeyCount = index.getKeyFieldNames().size();
    // Fields to forward with every emitted token: tag fields map to themselves, while
    // primary-key and filter fields are offset by the secondary-key count — presumably
    // because the tokenizer output places the token fields before them (TODO confirm
    // against the tokenizer's output record layout).
    int[] forwardedFields = new int[NUM_TAG_FIELDS + numPrimaryKeys + numFilterFields];
    for (int pos = 0; pos < forwardedFields.length; pos++) {
        forwardedFields[pos] = (pos < NUM_TAG_FIELDS) ? pos : pos + secondaryKeyCount;
    }
    BinaryTokenizerOperatorDescriptor tokenizerOp = new BinaryTokenizerOperatorDescriptor(spec,
            getTaggedRecordDescriptor(tokenKeyPairRecDesc), tokenizerFactory, documentFieldIndex,
            forwardedFields, isPartitioned, false, true, MissingWriterFactory.INSTANCE);
    // Tokenization must run where the primary index's data lives.
    AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, tokenizerOp,
            primaryPartitionConstraint);
    return tokenizerOp;
}