/**
 * Wraps {@code srcRel} in an identity {@link HiveProject} over all of its columns and
 * registers a fresh {@link RowResolver} plus column-position map for the new node.
 *
 * <p>NOTE(review): the output RowResolver is populated with only the first
 * {@code numColumns} entries of the input resolver, while the projection below emits
 * every field of {@code srcRel} — confirm this asymmetry is intentional (e.g. the
 * trailing columns are deliberately hidden from name resolution).
 *
 * @param srcRel     input relational expression (the left outer side)
 * @param numColumns number of leading columns to expose in the output RowResolver
 * @return the identity projection node, with its bookkeeping registered
 * @throws SemanticException if RowResolver population fails
 */
private RelNode projectLeftOuterSide(RelNode srcRel, int numColumns) throws SemanticException {
  RowResolver iRR = relToHiveRR.get(srcRel);
  RowResolver oRR = new RowResolver();
  // Copy only the first numColumns resolver entries into the output resolver.
  RowResolver.add(oRR, iRR, numColumns);

  List<RexNode> calciteColLst = new ArrayList<RexNode>();
  List<String> oFieldNames = new ArrayList<String>();
  RelDataType iType = srcRel.getRowType();
  // Identity projection: one input reference per field of the source row type.
  for (int i = 0; i < iType.getFieldCount(); i++) {
    RelDataTypeField fType = iType.getFieldList().get(i);
    String fName = iType.getFieldNames().get(i);
    calciteColLst.add(cluster.getRexBuilder().makeInputRef(fType.getType(), i));
    oFieldNames.add(fName);
  }

  HiveRelNode selRel = HiveProject.create(srcRel, calciteColLst, oFieldNames);

  // Register the resolver and Hive-name -> Calcite-position map so later
  // planning stages can resolve columns of the new node.
  this.relToHiveColNameCalcitePosMap.put(selRel, buildHiveToCalciteColumnMap(oRR, selRel));
  this.relToHiveRR.put(selRel, oRR);
  return selRel;
}
/**
 * Materializes one COLUMNS-metadata row for {@code field}.
 *
 * <p>The array follows the INFORMATION_SCHEMA.COLUMNS column layout, with a trailing
 * Druid-specific JDBC_TYPE column (see the per-element comments).
 *
 * <p>NOTE(review): ORDINAL_POSITION is emitted as {@code field.getIndex()}, which is
 * 0-based, while the SQL standard defines ORDINAL_POSITION as 1-based — confirm
 * consumers expect the 0-based value.
 */
@Override
public Object[] apply(final RelDataTypeField field) {
  final RelDataType type = field.getType();
  // Classify the type once; the ternaries below key off these flags.
  boolean isNumeric = SqlTypeName.NUMERIC_TYPES.contains(type.getSqlTypeName());
  boolean isCharacter = SqlTypeName.CHAR_TYPES.contains(type.getSqlTypeName());
  boolean isDateTime = SqlTypeName.DATETIME_TYPES.contains(type.getSqlTypeName());
  return new Object[]{
      CATALOG_NAME, // TABLE_CATALOG
      schemaName, // TABLE_SCHEMA
      tableName, // TABLE_NAME
      field.getName(), // COLUMN_NAME
      String.valueOf(field.getIndex()), // ORDINAL_POSITION
      "", // COLUMN_DEFAULT
      type.isNullable() ? "YES" : "NO", // IS_NULLABLE
      type.getSqlTypeName().toString(), // DATA_TYPE
      null, // CHARACTER_MAXIMUM_LENGTH
      null, // CHARACTER_OCTET_LENGTH
      isNumeric ? String.valueOf(type.getPrecision()) : null, // NUMERIC_PRECISION
      isNumeric ? "10" : null, // NUMERIC_PRECISION_RADIX
      isNumeric ? String.valueOf(type.getScale()) : null, // NUMERIC_SCALE
      isDateTime ? String.valueOf(type.getPrecision()) : null, // DATETIME_PRECISION
      isCharacter ? type.getCharset().name() : null, // CHARACTER_SET_NAME
      isCharacter ? type.getCollation().getCollationName() : null, // COLLATION_NAME
      Long.valueOf(type.getSqlTypeName().getJdbcOrdinal()) // JDBC_TYPE (Druid extension)
  };
}
} // closes the enclosing (anonymous) class from the original source
/**
 * Builds the expression that restores a partition column's declared type.
 *
 * <p>The native Parquet reader emits partition columns as VARCHAR, so the value is
 * referenced as VARCHAR and then converted to the type declared by the Hive scan:
 * CHAR targets get a trailing-blank trim, everything else an explicit cast.
 */
private RexNode createPartitionColumnCast(final DrillScanRel hiveScanRel, final DrillScanRel nativeScanRel,
    final String outputColName, final String dirColName, final RexBuilder rb) {
  // Partition values come out of the native reader as VARCHAR, so reference them as such.
  final RelDataTypeField dirField = nativeScanRel.getRowType().getField(dirColName, false, false);
  final RexInputRef varcharRef =
      rb.makeInputRef(rb.getTypeFactory().createSqlType(SqlTypeName.VARCHAR), dirField.getIndex());
  // The target type is whatever the original Hive scan declares for this column.
  final RelDataType targetType = hiveScanRel.getRowType().getField(outputColName, false, false).getType();
  // CHAR values are blank-padded; trim instead of casting.
  return targetType.getSqlTypeName() == SqlTypeName.CHAR
      ? rb.makeCall(RTRIM, varcharRef)
      : rb.makeCast(targetType, varcharRef);
}
}
/** Returns whether one type is just a widening of another.
 *
 * <p>For example:<ul>
 * <li>{@code VARCHAR(10)} is a widening of {@code VARCHAR(5)}.
 * <li>{@code VARCHAR(10)} is a widening of {@code VARCHAR(10) NOT NULL}.
 * </ul>
 */
private boolean isWidening(RelDataType type, RelDataType type1) {
  // Types must agree on the SQL type name; nullability is deliberately ignored.
  if (type.getSqlTypeName() != type1.getSqlTypeName()) {
    return false;
  }
  // Same family: widening means the precision did not shrink.
  return type.getPrecision() >= type1.getPrecision();
}
/**
 * Builds a {@code ColumnRowType} containing one literal inner column per field of
 * this node's row type, preserving field order.
 */
ColumnRowType buildColumnRowType() {
  final ArrayList<TblColRef> columns = Lists.newArrayListWithCapacity(rowType.getFieldCount());
  for (final RelDataTypeField field : rowType.getFieldList()) {
    final TblColRef colRef =
        TblColRef.newInnerColumn(field.getName(), TblColRef.InnerDataTypeEnum.LITERAL);
    columns.add(colRef);
  }
  return new ColumnRowType(columns);
}
/**
 * Converts the column at {@code inputRefIndx} of {@code inputRel} into an
 * {@link ExprNodeDesc} by visiting a synthetic {@link RexInputRef} with the converter.
 *
 * <p>Cleanup: the original initialized a local to {@code null} and immediately
 * overwrote it (dead store); the result is now returned directly.
 *
 * @param inputRefIndx index of the column in {@code inputRel}'s row type
 * @param inputRel     relational node whose row type supplies the column's type
 * @param exprConv     visitor translating Rex nodes to Hive expression descriptors
 * @return the converted expression descriptor
 */
public static ExprNodeDesc getExprNode(Integer inputRefIndx, RelNode inputRel, ExprNodeConverter exprConv) {
  // Build an input reference carrying the column's type, then let the converter translate it.
  RexNode rexInputRef = new RexInputRef(inputRefIndx,
      inputRel.getRowType().getFieldList().get(inputRefIndx).getType());
  return rexInputRef.accept(exprConv);
}
public String generateSql() { SqlDialect dialect = getJdbcDialect(); final HiveJdbcImplementor jdbcImplementor = new HiveJdbcImplementor(dialect, (JavaTypeFactory) getCluster().getTypeFactory()); Project topProject; if (getInput() instanceof Project) { topProject = (Project) getInput(); } else { // If it is not a project operator, we add it on top of the input // to force generating the column names instead of * while // translating to SQL RelNode nodeToTranslate = getInput(); RexBuilder builder = getCluster().getRexBuilder(); List<RexNode> projects = new ArrayList<>( nodeToTranslate.getRowType().getFieldList().size()); for (int i = 0; i < nodeToTranslate.getRowType().getFieldCount(); i++) { projects.add(builder.makeInputRef(nodeToTranslate, i)); } topProject = new JdbcProject(nodeToTranslate.getCluster(), nodeToTranslate.getTraitSet(), nodeToTranslate, projects, nodeToTranslate.getRowType()); } final HiveJdbcImplementor.Result result = jdbcImplementor.translate(topProject); return result.asStatement().toSqlString(dialect).getSql(); }
/**
 * Attempts to rewrite a correlated-variable field access as a plain input reference
 * into one of the current rel's (possibly rewritten) inputs.
 *
 * <p>Scans the inputs left to right while accumulating the running column offset; the
 * first input whose frame produces the referenced correlated variable yields a
 * {@link RexInputRef} at that offset. If no input produces it, the original field
 * access is returned unchanged.
 */
private RexNode decorrFieldAccess(RexFieldAccess fieldAccess) {
  int newInputOutputOffset = 0;
  for (RelNode input : currentRel.getInputs()) {
    final Frame frame = map.get(input);
    if (frame != null) {
      // try to find in this input rel the position of cor var
      final CorRef corRef = cm.mapFieldAccessToCorRef.get(fieldAccess);
      if (corRef != null) {
        Integer newInputPos = frame.corDefOutputs.get(corRef.def());
        if (newInputPos != null) {
          // This input rel does produce the cor var referenced.
          // Assume fieldAccess has the correct type info.
          return new RexInputRef(newInputPos + newInputOutputOffset,
              frame.r.getRowType().getFieldList().get(newInputPos)
                  .getType());
        }
      }

      // this input rel does not produce the cor var needed
      // (advance by the REWRITTEN input's width, which may differ from the original's)
      newInputOutputOffset += frame.r.getRowType().getFieldCount();
    } else {
      // this input rel is not rewritten
      newInputOutputOffset += input.getRowType().getFieldCount();
    }
  }
  // No input produces the correlated variable; leave the access as-is.
  return fieldAccess;
}
} // closes the enclosing class/shuttle from the original source
private RelNode createFirstGB(RelNode input, boolean left, RelOptCluster cluster, RexBuilder rexBuilder) throws CalciteSemanticException { final List<RexNode> gbChildProjLst = Lists.newArrayList(); final List<Integer> groupSetPositions = Lists.newArrayList(); for (int cInd = 0; cInd < input.getRowType().getFieldList().size(); cInd++) { gbChildProjLst.add(rexBuilder.makeInputRef(input, cInd)); groupSetPositions.add(cInd); } if (left) { gbChildProjLst.add(rexBuilder.makeBigintLiteral(new BigDecimal(2))); } else { gbChildProjLst.add(rexBuilder.makeBigintLiteral(new BigDecimal(1))); } // also add the last VCol groupSetPositions.add(input.getRowType().getFieldList().size()); // create the project before GB RelNode gbInputRel = HiveProject.create(input, gbChildProjLst, null); // groupSetPosition includes all the positions final ImmutableBitSet groupSet = ImmutableBitSet.of(groupSetPositions); List<AggregateCall> aggregateCalls = Lists.newArrayList(); RelDataType aggFnRetType = TypeConverter.convert(TypeInfoFactory.longTypeInfo, cluster.getTypeFactory()); AggregateCall aggregateCall = HiveCalciteUtil.createSingleArgAggCall("count", cluster, TypeInfoFactory.longTypeInfo, input.getRowType().getFieldList().size(), aggFnRetType); aggregateCalls.add(aggregateCall); return new HiveAggregate(cluster, cluster.traitSetOf(HiveRelNode.CONVENTION), gbInputRel, groupSet, null, aggregateCalls); }
private static List<RexNode> getNotNullConditions(RelOptCluster cluster, RexBuilder rexBuilder, RelNode input, Set<Integer> inputKeyPositions, Set<String> pushedPredicates) { final List<RexNode> newConditions = Lists.newArrayList(); for (int pos : inputKeyPositions) { RelDataType keyType = input.getRowType().getFieldList().get(pos).getType(); // Nothing to do if key cannot be null if (!keyType.isNullable()) { continue; } RexNode cond = rexBuilder.makeCall(SqlStdOperatorTable.IS_NOT_NULL, rexBuilder.makeInputRef(input, pos)); String digest = cond.toString(); if (pushedPredicates.add(digest)) { newConditions.add(cond); } } return newConditions; }
public static List<ColumnMetaData> createColumnMetaData(final RelDataType rowType) List<RelDataTypeField> fieldList = rowType.getFieldList(); final ColumnMetaData.Rep rep = QueryMaker.rep(field.getType().getSqlTypeName()); final ColumnMetaData.ScalarType columnType = ColumnMetaData.scalar( field.getType().getSqlTypeName().getJdbcOrdinal(), field.getType().getSqlTypeName().getName(), rep ); false, // searchable false, // currency field.getType().isNullable() ? DatabaseMetaData.columnNullable field.getType().getPrecision(), // display size field.getName(), // label field.getType().getPrecision(), // precision field.getType().getScale(), // scale
private void checkFieldCount(SqlNode node, SqlValidatorTable table, SqlNode source, RelDataType logicalSourceRowType, RelDataType logicalTargetRowType) { final int sourceFieldCount = logicalSourceRowType.getFieldCount(); final int targetFieldCount = logicalTargetRowType.getFieldCount(); if (sourceFieldCount != targetFieldCount) { throw newValidationError(node, final List<ColumnStrategy> strategies = table.unwrap(RelOptTable.class).getColumnStrategies(); for (final RelDataTypeField field : table.getRowType().getFieldList()) { final RelDataTypeField targetField = logicalTargetRowType.getField(field.getName(), true, false); switch (strategies.get(field.getIndex())) { case NOT_NULLABLE: assert !field.getType().isNullable(); if (targetField == null) { throw newValidationError(node, assert field.getType().isNullable(); break; case VIRTUAL:
public static HiveTableFunctionScan createUDTFForSetOp(RelOptCluster cluster, RelNode input) throws SemanticException { RelTraitSet traitSet = TraitsUtil.getDefaultTraitSet(cluster); List<RexNode> originalInputRefs = Lists.transform(input.getRowType().getFieldList(), new Function<RelDataTypeField, RexNode>() { @Override public RexNode apply(RelDataTypeField input) { return new RexInputRef(input.getIndex(), input.getType()); } }); ImmutableList.Builder<RelDataType> argTypeBldr = ImmutableList.<RelDataType> builder(); for (int i = 0; i < originalInputRefs.size(); i++) { argTypeBldr.add(originalInputRefs.get(i).getType()); } RelDataType retType = input.getRowType(); String funcName = "replicate_rows"; FunctionInfo fi = FunctionRegistry.getFunctionInfo(funcName); SqlOperator calciteOp = SqlFunctionConverter.getCalciteOperator(funcName, fi.getGenericUDTF(), argTypeBldr.build(), retType); // Hive UDTF only has a single input List<RelNode> list = new ArrayList<>(); list.add(input); RexNode rexNode = cluster.getRexBuilder().makeCall(calciteOp, originalInputRefs); return HiveTableFunctionScan.create(cluster, traitSet, list, rexNode, null, retType, null); }
int rowIDPos = tableScan.getTable().getRowType().getField( VirtualColumn.ROWID.getName(), false, false).getIndex(); RexNode rowIDFieldAccess = rexBuilder.makeFieldAccess( rexBuilder.makeInputRef(tableScan.getTable().getRowType().getFieldList().get(rowIDPos).getType(), rowIDPos), 0); List<RexNode> conds = new ArrayList<>(); RelDataType bigIntType = relBuilder.getTypeFactory().createSqlType(SqlTypeName.BIGINT); final RexNode literalHighWatermark = rexBuilder.makeLiteral( tableMaterializationTxnList.getHighWatermark(), bigIntType, false); conds.add(
/**
 * Rewrites an input reference on the joined row: references into the left side become
 * field accesses on a correlation variable; references into the right side are
 * re-indexed relative to the right input.
 */
public RexNode visitInputRef(RexInputRef inputRef) {
  final RexBuilder rexBuilder = getRexBuilder();
  final RelDataType leftRowType = left.getRowType();
  final int leftFieldCount = leftRowType.getFieldCount();
  final int index = inputRef.getIndex();
  if (index >= leftFieldCount) {
    // Right-side column: shift the index past the left side's columns.
    return rexBuilder.makeInputRef(right, index - leftFieldCount);
  }
  // Left-side column: access it through the correlation variable.
  final RexNode correlVar = rexBuilder.makeCorrel(leftRowType, id);
  return rexBuilder.makeFieldAccess(correlVar, index);
}
} // closes the enclosing class/shuttle from the original source
Set<RelDataTypeField> extraFields) { if ((aggregate.getIndicatorCount() > 0) || (aggregate.getGroupSet().isEmpty()) || fieldsUsed.contains(aggregate.getGroupSet())) { return aggregate; final RelDataType rowType = input.getRowType(); RexBuilder rexBuilder = aggregate.getCluster().getRexBuilder(); final List<RexNode> newProjects = new ArrayList<>(); final List<RexNode> inputExprs = input.getChildExps(); if (inputExprs == null || inputExprs.isEmpty()) { return aggregate; for (int i = 0; i < rowType.getFieldCount(); i++) { if (aggregate.getGroupSet().get(i)) { newProjects.add(rexBuilder.makeLiteral(true)); } else { newProjects.add(rexBuilder.makeInputRef(input, i));
/**
 * Builds an identity projection list: one {@link RexInputRef} per field of
 * {@code inputRowType}, in field order.
 *
 * <p>Fixes: the {@link RexBuilder} was fetched inside the loop although it is
 * loop-invariant; it is now hoisted out, and the result list is presized.
 *
 * @param inputRowType row type whose fields are referenced
 * @return input references for every field, in order
 */
private List<RexNode> projects(RelDataType inputRowType) {
  // Loop-invariant: obtain the builder once.
  final RexBuilder rexBuilder = cluster.getRexBuilder();
  final List<RexNode> exprList = new ArrayList<>(inputRowType.getFieldCount());
  for (RelDataTypeField field : inputRowType.getFieldList()) {
    exprList.add(rexBuilder.makeInputRef(field.getType(), field.getIndex()));
  }
  return exprList;
}
/**
 * Verifies the "foo2" Druid table exposes exactly the columns __time, dim2 and m1
 * with the expected SQL type names, in that order.
 */
@Test
public void testGetTableMapFoo2() {
  final DruidTable fooTable = (DruidTable) schema.getTableMap().get("foo2");
  final RelDataType rowType = fooTable.getRowType(new JavaTypeFactoryImpl());
  final List<RelDataTypeField> fields = rowType.getFieldList();

  Assert.assertEquals(3, fields.size());

  // Expected (name, type) pairs in positional order.
  final String[] expectedNames = {"__time", "dim2", "m1"};
  final SqlTypeName[] expectedTypes =
      {SqlTypeName.TIMESTAMP, SqlTypeName.VARCHAR, SqlTypeName.BIGINT};
  for (int i = 0; i < expectedNames.length; i++) {
    Assert.assertEquals(expectedNames[i], fields.get(i).getName());
    Assert.assertEquals(expectedTypes[i], fields.get(i).getType().getSqlTypeName());
  }
}
} // closes the enclosing test class from the original source
if (conformance.isInsertSubsetColumnsAllowed() && targetRowType.isStruct() && rowConstructor.operandCount() < targetRowType.getFieldCount()) { targetRowType = typeFactory.createStructType( targetRowType.getFieldList() .subList(0, rowConstructor.operandCount())); } else if (targetRowType.isStruct() && rowConstructor.operandCount() != targetRowType.getFieldCount()) { return; rowConstructor); if (targetRowType.isStruct()) { for (Pair<SqlNode, RelDataTypeField> pair : Pair.zip(rowConstructor.getOperandList(), targetRowType.getFieldList())) { if (!pair.right.getType().isNullable() && SqlUtil.isNullLiteral(pair.left, false)) { throw newValidationError(node, RESOURCE.columnNotNullable(pair.right.getName()));
/** * Apply any data format conversion expressions. */ private RexNode createColumnFormatConversion(final DrillScanRel hiveScanRel, final DrillScanRel nativeScanRel, final String colName, final RexBuilder rb) { final RelDataType outputType = hiveScanRel.getRowType().getField(colName, false, false).getType(); final RelDataTypeField inputField = nativeScanRel.getRowType().getField(colName, false, false); final RexInputRef inputRef = rb.makeInputRef(inputField.getType(), inputField.getIndex()); if (outputType.getSqlTypeName() == SqlTypeName.TIMESTAMP) { // TIMESTAMP is stored as INT96 by Hive in ParquetFormat. Use convert_fromTIMESTAMP_IMPALA UDF to convert // INT96 format data to TIMESTAMP // TODO: Remove this conversion once "store.parquet.reader.int96_as_timestamp" will be true by default return rb.makeCall(INT96_TO_TIMESTAMP, inputRef); } return inputRef; }