/**
 * Converts an index field name into a {@link SchemaPath}.
 * A leading {@code "family:"} prefix is stripped; dotted names are treated as
 * nested field paths, everything else as a simple top-level column.
 */
private SchemaPath fieldName2SchemaPath(String fieldName) {
  if (fieldName.contains(":")) {
    // Drop the column-family qualifier, keep only the field part after ':'.
    fieldName = fieldName.split(":")[1];
  }
  // Dotted names denote nested fields and go through the FieldPath parser.
  return fieldName.contains(".")
      ? FieldPathHelper.fieldPath2SchemaPath(FieldPath.parseFrom(fieldName))
      : SchemaPath.getSimplePath(fieldName);
}
/**
 * Parses an input string using the same rules that are applied to a field name
 * in a query. A dot outside back-ticks separates nested name segments, so the
 * result is a chain of {@link NameSegment} instances; a bracketed index such as
 * {@code [0]} produces an {@link ArraySegment}. The dynamic star token is
 * special-cased and returned directly as a simple path.
 *
 * @param expr input string to be parsed; {@code null} or empty yields {@code null}
 * @return {@link SchemaPath} instance, or {@code null} when {@code expr} is null or empty
 * @throws IllegalStateException when the parsed expression is not a {@link SchemaPath}
 */
public static SchemaPath parseFromString(String expr) {
  if (expr == null || expr.isEmpty()) {
    // Nothing to parse.
    return null;
  }
  if (SchemaPath.DYNAMIC_STAR.equals(expr)) {
    // The dynamic star is not a regular identifier; short-circuit it.
    return SchemaPath.getSimplePath(expr);
  }
  LogicalExpression logicalExpression = LogicalExpressionParser.parse(expr);
  if (logicalExpression instanceof SchemaPath) {
    return (SchemaPath) logicalExpression;
  } else {
    throw new IllegalStateException(String.format("Schema path is not a valid format: %s.", logicalExpression));
  }
}
/**
 * Scans the index description for functional (expression-based) index columns
 * and assigns each a synthetic "$N" path used to reference it in the plan.
 */
private void init() {
  int count = 0;
  for (LogicalExpression indexedExpr : indexDesc.getIndexColumns()) {
    if (!(indexedExpr instanceof SchemaPath)) {
      hasFunctionalField = true;
      // Functional columns are renamed to "$0", "$1", ...; note that 'count'
      // only advances for functional columns, so plain SchemaPath columns do
      // not consume a "$N" slot.
      SchemaPath functionalFieldPath = SchemaPath.getSimplePath("$"+count);
      newPathsForIndexedFunction.add(functionalFieldPath);

      // now we handle only cast expression
      if (indexedExpr instanceof CastExpression) {
        // We handle only CAST directly on SchemaPath for now.
        SchemaPath pathBeingCasted = (SchemaPath)((CastExpression) indexedExpr).getInput();
        // Record the mapping original-path -> "$N" and the reverse bookkeeping
        // needed to rewrite expressions that reference the casted path.
        addTargetPathForOriginalPath(pathBeingCasted, functionalFieldPath);
        addPathInExpr(indexedExpr, pathBeingCasted);
        exprToConvert.put(indexedExpr, functionalFieldPath);
        allPathsInFunction.add(pathBeingCasted);
      }
      count++;
    }
  }
}
/**
 * Returns the map vector backing the given column family, creating and
 * registering a new one on first use.
 */
private MapVector getOrCreateFamilyVector(String familyName, boolean allocateOnCreate) {
  MapVector familyVector = familyVectorMap.get(familyName);
  if (familyVector != null) {
    // Fast path: the family has already been materialized.
    return familyVector;
  }
  try {
    SchemaPath column = SchemaPath.getSimplePath(familyName);
    MaterializedField field = MaterializedField.create(column.getAsNamePart().getName(), COLUMN_FAMILY_TYPE);
    familyVector = outputMutator.addField(field, MapVector.class);
    if (allocateOnCreate) {
      familyVector.allocateNew();
    }
    // Track the new family both in the projected columns and in the cache.
    getColumns().add(column);
    familyVectorMap.put(familyName, familyVector);
    return familyVector;
  } catch (SchemaChangeException e) {
    // Schema changes cannot be handled at this level; surface as runtime failure.
    throw new DrillRuntimeException(e);
  }
}
final String partitionCol = partitionColMapping.get(colName.getRootSegmentPath()); if (partitionCol != null) { nativeScanCols.add(SchemaPath.getSimplePath(partitionCol)); } else { nativeScanCols.add(colName);
NameSegment root = column.getRootSegment(); byte[] family = root.getPath().getBytes(); transformed.add(SchemaPath.getSimplePath(root.getPath())); PathSegment child = root.getChild(); if (child != null && child.isNamed()) {
@Test
public void testXpath_Double() throws Exception {
  final String query = "select xpath_double ('<a><b>20</b><c>40</c></a>', 'a/b * a/c') as col \n"
      + "from hive.kv \n"
      + "limit 0";

  // xpath_double is expected to surface as a nullable DOUBLE (FLOAT8) column.
  final TypeProtos.MajorType expectedType = TypeProtos.MajorType.newBuilder()
      .setMinorType(TypeProtos.MinorType.FLOAT8)
      .setMode(TypeProtos.DataMode.OPTIONAL)
      .build();

  final List<Pair<SchemaPath, TypeProtos.MajorType>> expectedSchema =
      Lists.newArrayList(Pair.of(SchemaPath.getSimplePath("col"), expectedType));

  testBuilder()
      .sqlQuery(query)
      .schemaBaseLine(expectedSchema)
      .build()
      .run();
}
List<LogicalExpression> indexFields = field2SchemaPath(desc.getIndexedFields()); List<LogicalExpression> coveringFields = field2SchemaPath(desc.getIncludedFields()); coveringFields.add(SchemaPath.getSimplePath("_id")); CollationContext collationContext = null; if (!desc.isHashed()) { // hash index has no collation property
/**
 * Maps a column name onto its position in the partition column list.
 * Returns {@code null} when the name is not a partition column.
 */
@Override
public Integer getIdIfValid(String name) {
  final int index = partitionColumns.indexOf(SchemaPath.getSimplePath(name));
  // indexOf yields -1 for a miss; any non-negative value is a valid id.
  return index < 0 ? null : index;
}
/**
 * Creates a reader for a sequence-file split. Sequence files always expose
 * exactly two columns — the key and the value — so the projection is fixed.
 */
public SequenceFileRecordReader(final FileSplit split,
                                final DrillFileSystem dfs,
                                final String queryUserName,
                                final String opUserName) {
  final List<SchemaPath> projected = new ArrayList<>();
  projected.add(SchemaPath.getSimplePath(keySchema));
  projected.add(SchemaPath.getSimplePath(valueSchema));
  setColumns(projected);
  this.dfs = dfs;
  this.split = split;
  this.queryUserName = queryUserName;
  this.opUserName = opUserName;
}
/**
 * Converts an index field name into a {@link SchemaPath}.
 * A leading {@code "family:"} prefix is stripped first; dotted names are
 * parsed as nested field paths, anything else becomes a simple path.
 */
private SchemaPath fieldName2SchemaPath(String fieldName) {
  if (fieldName.contains(":")) {
    // Drop the column-family qualifier, keep only the part after ':'.
    fieldName = fieldName.split(":")[1];
  }
  if (fieldName.contains(".")) {
    // Dotted names denote nested fields; delegate to the FieldPath parser.
    return FieldPathHelper.fieldPath2SchemaPath(FieldPath.parseFrom(fieldName));
  }
  return SchemaPath.getSimplePath(fieldName);
}
private static List<SchemaPath> getProjectedColumns(final RelOptTable table, boolean isSelectStar) { List<String> columnNames = table.getRowType().getFieldNames(); List<SchemaPath> projectedColumns = new ArrayList<SchemaPath>(columnNames.size()); for (String columnName : columnNames) { projectedColumns.add(SchemaPath.getSimplePath(columnName)); } // If the row-type doesn't contain the STAR keyword, then insert it // as we are dealing with a SELECT_STAR query. if (isSelectStar && !Utilities.isStarQuery(projectedColumns)) { projectedColumns.add(SchemaPath.STAR_COLUMN); } return projectedColumns; }
/**
 * Physical relational node for UNNEST; builds the physical operator from the
 * field referenced by {@code ref}.
 */
public UnnestPrel(RelOptCluster cluster, RelTraitSet traits, RelDataType rowType, RexNode ref) {
  super(cluster, traits, ref);
  // NOTE(review): 'ref' is cast unchecked — callers are assumed to always pass
  // a RexFieldAccess here; confirm against the planner rule creating this Prel.
  this.unnestPOP = new UnnestPOP(null, SchemaPath.getSimplePath(((RexFieldAccess)ref).getField().getName()), DrillUnnestRelBase.IMPLICIT_COLUMN);
  this.rowType = rowType;
}
/**
 * Returns the correlate column as a {@link SchemaPath} when it is excluded
 * from the output, or {@code null} otherwise.
 */
private SchemaPath getColumn() {
  if (!this.excludeCorrelateColumn) {
    return null;
  }
  final int fieldIndex = this.getRequiredColumns().asList().get(0);
  final String fieldName = this.getInput(0).getRowType().getFieldNames().get(fieldIndex);
  return SchemaPath.getSimplePath(fieldName);
}
/**
 * Reports whether this scan is a star query: either the base class says so,
 * or the projected columns contain the special "columns" column.
 */
@Override
public boolean isStarQuery() {
  return super.isStarQuery() || Iterables.tryFind(getColumns(), new Predicate<SchemaPath>() {
    private final SchemaPath COLUMNS = SchemaPath.getSimplePath("columns");

    @Override
    public boolean apply(@Nullable SchemaPath path) {
      // The parameter is @Nullable, so compare from the non-null constant
      // side to avoid a potential NullPointerException on a null element.
      return COLUMNS.equals(path);
    }
  }).isPresent();
}
/**
 * Returns the map vector backing the given column family, creating and
 * registering a new one on first use.
 */
private MapVector getOrCreateFamilyVector(String familyName, boolean allocateOnCreate) {
  try {
    MapVector v = familyVectorMap.get(familyName);
    if(v == null) {
      // First use of this family: materialize a map vector for it.
      SchemaPath column = SchemaPath.getSimplePath(familyName);
      MaterializedField field = MaterializedField.create(column.getAsNamePart().getName(), COLUMN_FAMILY_TYPE);
      v = outputMutator.addField(field, MapVector.class);
      if (allocateOnCreate) {
        v.allocateNew();
      }
      // Track the new family both in the projected columns and in the cache.
      getColumns().add(column);
      familyVectorMap.put(familyName, v);
    }
    return v;
  } catch (SchemaChangeException e) {
    // Schema changes cannot be handled at this level; surface as runtime failure.
    throw new DrillRuntimeException(e);
  }
}
/**
 * Builds pass-through expressions for every incoming field except the one
 * named by the operator's configured column.
 */
private List<NamedExpression> getExpressionList() {
  final List<NamedExpression> expressions = Lists.newArrayList();
  for (MaterializedField field : incoming.getSchema()) {
    final String fieldName = field.getName();
    // Skip the configured column; everything else is projected unchanged.
    if (!fieldName.equals(popConfig.getColumn().getRootSegmentPath())) {
      expressions.add(new NamedExpression(SchemaPath.getSimplePath(fieldName), new FieldReference(fieldName)));
    }
  }
  return expressions;
}
/**
 * Converts this node into the logical Unnest operator. Only a field access
 * can be unnested; anything else maps to no operator.
 */
@Override
public LogicalOperator implement(DrillImplementor implementor) {
  if (!(getRef() instanceof RexFieldAccess)) {
    return null;
  }
  final RexFieldAccess fieldAccess = (RexFieldAccess) getRef();
  return new Unnest(SchemaPath.getSimplePath(fieldAccess.getField().getName()));
}
/**
 * Builds a hyper-batch container: for every field in the schema, collects the
 * corresponding value vector from each batch group so the container can be
 * addressed with a four-byte selection vector across all groups.
 */
private VectorContainer constructHyperBatch(List<BatchGroup> batchGroupList) {
  VectorContainer cont = new VectorContainer();
  for (MaterializedField field : schema) {
    // One slot per batch group; order must match batchGroupList so the
    // four-byte SV's batch index resolves to the right vector.
    ValueVector[] vectors = new ValueVector[batchGroupList.size()];
    int i = 0;
    for (BatchGroup group : batchGroupList) {
      vectors[i++] = group.getValueAccessorById(
          field.getValueClass(),
          group.getValueVectorId(SchemaPath.getSimplePath(field.getName())).getFieldIds())
          .getValueVector();
    }
    cont.add(vectors);
  }
  cont.buildSchema(BatchSchema.SelectionVectorMode.FOUR_BYTE);
  return cont;
}
/**
 * Reacts to a new incoming batch: starts a new schema when the batch schema
 * differs from the current one (or on the first batch, or when the current
 * schema contains complex vectors), and re-binds the partition-comparator
 * vector if the batch carries one.
 *
 * @throws IOException if flushing data written under the previous schema fails
 */
@Override
public void updateSchema(VectorAccessible batch) throws IOException {
  if (this.batchSchema == null || !this.batchSchema.equals(batch.getSchema()) || containsComplexVectors(this.batchSchema)) {
    if (this.batchSchema != null) {
      // Flush rows written under the previous schema before switching.
      flush();
    }
    this.batchSchema = batch.getSchema();
    newSchema();
  }
  // Locate the optional partition-comparator bit vector; it is absent for
  // non-partitioned writes, in which case nothing is re-bound.
  TypedFieldId fieldId = batch.getValueVectorId(SchemaPath.getSimplePath(WriterPrel.PARTITION_COMPARATOR_FIELD));
  if (fieldId != null) {
    VectorWrapper w = batch.getValueAccessorById(BitVector.class, fieldId.getFieldIds());
    setPartitionVector((BitVector) w.getValueVector());
  }
}