/**
 * Wraps a Druid expression in a {@code timestamp_format} call.
 *
 * @param arg      Druid expression producing the timestamp to format
 * @param format   format pattern, emitted as a Druid string literal
 * @param timeZone time zone whose ID is forwarded to Druid
 * @return Druid expression string invoking {@code timestamp_format}
 */
private static String applyTimestampFormat(String arg, String format, TimeZone timeZone) {
  final List<String> operands = ImmutableList.of(
      arg,
      DruidExpressions.stringLiteral(format),
      DruidExpressions.stringLiteral(timeZone.getID()));
  return DruidExpressions.functionCall("timestamp_format", operands);
}
/**
 * Translates a single-operand call into the equivalent Druid function
 * invocation named by {@code druidOperator}.
 *
 * @return the Druid expression string, or null when an operand cannot be
 *         expressed in Druid
 */
@Override
public String toDruidExpression(RexNode rexNode, RelDataType rowType, DruidQuery druidQuery) {
  final RexCall call = (RexCall) rexNode;
  final List<String> druidExpressions =
      DruidExpressions.toDruidExpressions(druidQuery, rowType, call.getOperands());
  if (druidExpressions == null) {
    // At least one operand has no Druid translation; give up on the whole call.
    return null;
  }
  final String operand = Iterables.getOnlyElement(druidExpressions);
  return DruidQuery.format("%s(%s)", druidOperator, operand);
}
}
/**
 * Converts Hive's {@code from_unixtime(unixtime[, format])} into a Druid
 * {@code timestamp_format} expression. The epoch-seconds operand is scaled
 * to milliseconds (Druid timestamps are millis); when no format operand is
 * given, {@code DEFAULT_TS_FORMAT} is used. Output is rendered in UTC.
 *
 * @return the Druid expression string, or null when an operand cannot be
 *         expressed in Druid
 * @throws IllegalStateException if the call does not have 1 or 2 operands
 */
@Nullable
@Override
public String toDruidExpression(RexNode rexNode, RelDataType rowType, DruidQuery query) {
  final RexCall call = (RexCall) rexNode;
  if (call.getOperands().size() < 1 || call.getOperands().size() > 2) {
    // Fixed message: the function name was misspelled ("form_unixtime") and
    // the plural was wrong for "1 or 2".
    throw new IllegalStateException(
        "from_unixtime() requires 1 or 2 arguments, got " + call.getOperands().size());
  }
  final String arg =
      DruidExpressions.toDruidExpression(call.getOperands().get(0), rowType, query);
  if (arg == null) {
    return null;
  }
  // Hive's unixtime is seconds since epoch; scale to Druid milliseconds.
  final String numMillis = DruidQuery.format("(%s * '1000')", arg);
  final String format = call.getOperands().size() == 1
      ? DruidExpressions.stringLiteral(DEFAULT_TS_FORMAT)
      : DruidExpressions.toDruidExpression(call.getOperands().get(1), rowType, query);
  return DruidExpressions.functionCall("timestamp_format",
      ImmutableList.of(numMillis, format,
          DruidExpressions.stringLiteral(TimeZone.getTimeZone("UTC").getID())));
}
}
private RelNode copyNodeScan(RelNode scan) { final RelNode newScan; if (scan instanceof DruidQuery) { final DruidQuery dq = (DruidQuery) scan; // Ideally we should use HiveRelNode convention. However, since Volcano planner // throws in that case because DruidQuery does not implement the interface, // we set it as Bindable. Currently, we do not use convention in Hive, hence that // should be fine. // TODO: If we want to make use of convention (e.g., while directly generating operator // tree instead of AST), this should be changed. newScan = DruidQuery.create(optCluster, optCluster.traitSetOf(BindableConvention.INSTANCE), scan.getTable(), dq.getDruidTable(), ImmutableList.<RelNode>of(dq.getTableScan())); } else { newScan = new HiveTableScan(optCluster, optCluster.traitSetOf(HiveRelNode.CONVENTION), (RelOptHiveTable) scan.getTable(), ((RelOptHiveTable) scan.getTable()).getName(), null, false, false); } return newScan; } }
// NOTE(review): this chunk appears truncated/garbled — the body jumps from
// assigning 'hts' straight into dangling '.add(...)' calls whose receiver is
// missing, and references 'dq' and 'propList' that are not declared in the
// visible text. Code left byte-identical; recover the full method from
// version control before editing.
public static ASTNode table(RelNode scan) {
  HiveTableScan hts;
  if (scan instanceof DruidQuery) {
    // For a Druid query, the underlying Hive scan is wrapped inside it.
    hts = (HiveTableScan) ((DruidQuery) scan).getTableScan();
  } else {
    hts = (HiveTableScan) scan;
    // --- missing text: the builder these '.add' calls chain onto is not visible ---
    .add(HiveParser.StringLiteral, "\"" + Constants.DRUID_QUERY_JSON + "\"")
    .add(HiveParser.StringLiteral, "\"" + SemanticAnalyzer.escapeSQLString(
        dq.getQueryString()) + "\""));
    // Records the Druid query type as a table property in the AST.
    propList.add(ASTBuilder.construct(HiveParser.TOK_TABLEPROPERTY, "TOK_TABLEPROPERTY")
        .add(HiveParser.StringLiteral, "\"" + Constants.DRUID_QUERY_TYPE + "\"")
        .add(HiveParser.StringLiteral, "\"" + dq.getQueryType().getQueryName() + "\""));
/**
 * Builds a schema from a Druid query: one column per field of the query's
 * row type, all qualified with the underlying Hive table scan's alias.
 *
 * @param dq the Druid query supplying the row type and the wrapped table scan
 */
Schema(DruidQuery dq) {
  // dq is already statically typed as DruidQuery; the previous code
  // re-cast it redundantly via ((DruidQuery) dq).
  HiveTableScan hts = (HiveTableScan) dq.getTableScan();
  String tabName = hts.getTableAlias();
  for (RelDataTypeField field : dq.getRowType().getFieldList()) {
    add(new ColumnInfo(tabName, field.getName()));
  }
}
@Nullable @Override public String toDruidExpression(RexNode rexNode, RelDataType rowType, DruidQuery query ) { final RexCall call = (RexCall) rexNode; final String arg0 = DruidExpressions.toDruidExpression(call.getOperands().get(0), rowType, query); if (arg0 == null) { return null; } if (SqlTypeUtil.isDatetime((call.getOperands().get(0).getType()))) { // Timestamp is represented as long internally no need to any thing here return DruidExpressions.functionCall("div", ImmutableList.of(arg0, DruidExpressions.numberLiteral(1000))); } // dealing with String type final String format = call.getOperands().size() == 2 ? DruidExpressions .toDruidExpression(call.getOperands().get(1), rowType, query) : DEFAULT_TS_FORMAT; return DruidExpressions .functionCall("unix_timestamp", ImmutableList.of(arg0, DruidExpressions.stringLiteral(format))); } }
// NOTE(review): fragment — this method is cut off mid-body; the remainder of
// the 'if' and the rest of the method are outside this view.
// Appears to begin a field-trimming step over a DruidQuery's projection.
private static RelNode project(DruidQuery dq, ImmutableBitSet fieldsUsed,
    Set<RelDataTypeField> extraFields, RelBuilder relBuilder) {
  final int fieldCount = dq.getRowType().getFieldCount();
  // Fast-path check: every field used and nothing extra requested.
  if (fieldsUsed.equals(ImmutableBitSet.range(fieldCount)) && extraFields.isEmpty()) {
    final RexBuilder rexBuilder = dq.getCluster().getRexBuilder();
    final List<RelDataTypeField> fields = dq.getRowType().getFieldList();
// NOTE(review): heavily garbled fragment — the if/else scaffolding around the
// index/length handling is missing and braces are unbalanced. Code left
// byte-identical; recover the full substring-conversion method from version
// control. It appears to translate a SQL SUBSTRING call (1-based index) into
// Druid's 0-based substring(arg, start, length) — TODO confirm.
@Nullable
@Override
public String toDruidExpression(RexNode rexNode, RelDataType rowType, DruidQuery query) {
  final RexCall call = (RexCall) rexNode;
  final String arg = DruidExpressions.toDruidExpression(call.getOperands().get(0), rowType, query);
  if (arg == null) {
    return null;
  // --- missing text: branch structure between here and the next statement ---
  final String indexExp = DruidExpressions.toDruidExpression(call.getOperands().get(1), rowType, query);
  if (indexExp == null) {
    return null;
  // Dynamic index: shift from SQL's 1-based to Druid's 0-based position.
  indexStart = DruidQuery.format("(%s - 1)", indexExp);
  } else {
  // Constant index: fold the 1-based -> 0-based shift at plan time.
  final int index = RexLiteral.intValue(call.getOperands().get(1)) - 1;
  indexStart = DruidExpressions.numberLiteral(index);
  length = DruidExpressions.toDruidExpression(call.getOperands().get(2), rowType, query);
  if (length == null) {
    return null;
  length = DruidExpressions.numberLiteral(RexLiteral.intValue(call.getOperands().get(2)));
  // -1 presumably means "to end of string" in Druid substring — verify.
  length = DruidExpressions.numberLiteral(-1);
  return DruidQuery.format("substring(%s, %s, %s)", arg, indexStart, length);
// NOTE(review): fragment — the text between the stream forEach and the
// dangling 'FloorOperatorConversion() ));' is missing (presumably a
// druidOperatorMap.putAll(Maps.asMap(...)) head for the floor functions),
// and the method's tail is cut off. Code left byte-identical.
// Lazily builds the operator -> Druid-converter registry.
public static final Map<SqlOperator, org.apache.calcite.adapter.druid.DruidSqlOperatorConverter> getDefaultMap() {
  if (druidOperatorMap == null) {
    druidOperatorMap =
        new HashMap<SqlOperator, org.apache.calcite.adapter.druid.DruidSqlOperatorConverter>();
    // Seed with Calcite's built-in Druid operator conversions.
    DruidQuery.DEFAULT_OPERATORS_LIST.stream()
        .forEach(op -> druidOperatorMap.put(op.calciteOperator(), op));
    // --- missing text: putAll(...) head for the floor-date functions ---
    FloorOperatorConversion() ));
    // Every Hive EXTRACT variant maps to the same extract conversion.
    druidOperatorMap.putAll(Maps.asMap(HiveExtractDate.ALL_FUNCTIONS,
        (Function<SqlFunction, org.apache.calcite.adapter.druid.DruidSqlOperatorConverter>)
            input -> new ExtractOperatorConversion()));
    druidOperatorMap.put(HiveConcat.INSTANCE,
        new DirectOperatorConversion(HiveConcat.INSTANCE, "concat"));
    druidOperatorMap.put(SqlStdOperatorTable.SUBSTRING,
        new DruidSqlOperatorConverter.DruidSubstringOperatorConversion());
/**
 * Converts Hive's {@code to_date(ts)} into a Druid timestamp-floor to day
 * granularity, using the time zone derived from the operand's type.
 *
 * @return the Druid expression string, or null when the operand cannot be
 *         expressed in Druid
 * @throws IllegalStateException if the call does not have exactly 1 operand
 */
@Nullable
@Override
public String toDruidExpression(RexNode rexNode, RelDataType rowType, DruidQuery query) {
  final RexCall call = (RexCall) rexNode;
  final int arity = call.getOperands().size();
  if (arity != 1) {
    throw new IllegalStateException("to_date() requires 1 argument, got " + arity);
  }
  final RexNode operand = call.getOperands().get(0);
  final String arg = DruidExpressions.toDruidExpression(operand, rowType, query);
  if (arg == null) {
    return null;
  }
  // Floor to whole days ("P1D") in the operand's resolved time zone.
  return DruidExpressions.applyTimestampFloor(
      arg, Period.days(1).toString(), "", timezoneId(query, operand));
}
}
/**
 * Walks down a single-input chain of rel nodes looking for the table alias.
 * Stops (returns null) at a Project, at a fan-in node, or when no scan is
 * found.
 *
 * @param rel starting rel node; may be null
 * @return the table alias of the underlying scan, or null if none is found
 */
private static String getTblAlias(RelNode rel) {
  if (rel == null) {
    return null;
  }
  if (rel instanceof HiveTableScan) {
    return ((HiveTableScan) rel).getTableAlias();
  }
  if (rel instanceof DruidQuery) {
    // A Druid query wraps the actual Hive scan; look through it.
    final HiveTableScan scan = (HiveTableScan) ((DruidQuery) rel).getTableScan();
    return scan.getTableAlias();
  }
  if (rel instanceof Project) {
    // A projection may rename/reorder columns, so the alias no longer applies.
    return null;
  }
  return rel.getInputs().size() == 1 ? getTblAlias(rel.getInput(0)) : null;
}
// NOTE(review): two orphaned statements — the enclosing method is not visible.
// Matches the opening of project(...): a check that every field of the Druid
// query is used and no extra fields are requested.
final int fieldCount = dq.getRowType().getFieldCount();
if (fieldsUsed.equals(ImmutableBitSet.range(fieldCount)) && extraFields.isEmpty()) {
/**
 * Utility function to extract the time zone for a Druid expression operand.
 *
 * @param query Druid rel node, used to reach the planner's connection config
 * @param arg   operand whose SQL type decides which time zone applies
 * @return the session time zone for TIMESTAMP WITH LOCAL TIME ZONE operands,
 *         otherwise UTC
 */
private static TimeZone timezoneId(final DruidQuery query, final RexNode arg) {
  if (arg.getType().getSqlTypeName() == SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE) {
    // Local-zone timestamps are interpreted in the session's configured zone.
    final String zone = query.getTopNode().getCluster().getPlanner().getContext()
        .unwrap(CalciteConnectionConfig.class).timeZone();
    return TimeZone.getTimeZone(zone);
  }
  return TimeZone.getTimeZone("UTC");
}
/**
 * Rebuilds a materialization's table-side scan against {@code optCluster},
 * preserving the query side.
 *
 * @param materialization the materialization whose tableRel is copied
 * @return a new RelOptMaterialization with the copied scan
 */
@Override
public RelOptMaterialization apply(RelOptMaterialization materialization) {
  final RelNode viewScan = materialization.tableRel;
  final RelNode newViewScan;
  if (viewScan instanceof DruidQuery) {
    final DruidQuery dq = (DruidQuery) viewScan;
    newViewScan = DruidQuery.create(optCluster,
        optCluster.traitSetOf(HiveRelNode.CONVENTION),
        viewScan.getTable(),
        dq.getDruidTable(),
        ImmutableList.<RelNode>of(dq.getTableScan()));
  } else {
    final RelOptHiveTable table = (RelOptHiveTable) viewScan.getTable();
    newViewScan = new HiveTableScan(optCluster,
        optCluster.traitSetOf(HiveRelNode.CONVENTION),
        table,
        viewScan.getTable().getQualifiedName().get(0),
        null, false, false);
  }
  return new RelOptMaterialization(newViewScan, materialization.queryRel, null);
}
}
/**
 * Converts Hive's {@code date_add}/{@code date_sub} into a Druid
 * {@code timestamp_shift} by whole days; {@code direction} of -1 negates the
 * step count (date_sub).
 *
 * @return the Druid expression string, or null when an operand cannot be
 *         expressed in Druid
 * @throws IllegalStateException if the call does not have exactly 2 operands
 */
@Nullable
@Override
public String toDruidExpression(RexNode rexNode, RelDataType rowType, DruidQuery query) {
  final RexCall call = (RexCall) rexNode;
  final int arity = call.getOperands().size();
  if (arity != 2) {
    throw new IllegalStateException("date_add/date_sub() requires 2 arguments, got " + arity);
  }
  final String arg0 = DruidExpressions.toDruidExpression(call.getOperands().get(0), rowType, query);
  final String arg1 = DruidExpressions.toDruidExpression(call.getOperands().get(1), rowType, query);
  if (arg0 == null || arg1 == null) {
    return null;
  }
  // date_sub shifts backwards, so negate the step expression.
  final String steps;
  if (direction == -1) {
    steps = DruidQuery.format("-( %s )", arg1);
  } else {
    steps = arg1;
  }
  final String zoneId = timezoneId(query, call.getOperands().get(0)).getID();
  return DruidExpressions.functionCall("timestamp_shift",
      ImmutableList.of(
          arg0,
          DruidExpressions.stringLiteral("P1D"),
          steps,
          DruidExpressions.stringLiteral(zoneId)));
}
}
/**
 * Builds a schema from a Druid query: one column per field of the query's
 * row type, all qualified with the underlying Hive table scan's alias.
 *
 * @param dq the Druid query supplying the row type and the wrapped table scan
 */
Schema(DruidQuery dq) {
  // dq is already statically typed as DruidQuery; the previous code
  // re-cast it redundantly via ((DruidQuery) dq).
  HiveTableScan hts = (HiveTableScan) dq.getTableScan();
  String tabName = hts.getTableAlias();
  for (RelDataTypeField field : dq.getRowType().getFieldList()) {
    add(new ColumnInfo(tabName, field.getName()));
  }
}
// NOTE(review): garbled fragment — this looks like the interleaved bodies of
// TWO format-conversion methods (a one-arg variant and a granularity
// variant); braces are unbalanced and 'unit' / 'tz' are not declared in the
// visible text. Code left byte-identical; recover from version control.
final String arg = DruidExpressions.toDruidExpression(call.getOperands().get(0), rowType, query);
if (arg == null) {
  return null;
// Floors to day granularity, then formats as yyyy-MM-dd in the operand's zone.
final TimeZone tz = timezoneId(query, call.getOperands().get(0));
return applyTimestampFormat(
    DruidExpressions.applyTimestampFloor(arg, Period.days(1).toString(), "", tz),
    YYYY_MM_DD, tz);
// --- second method body starts here; its signature is missing ---
final String arg = DruidExpressions.toDruidExpression(call.getOperands().get(0), rowType, query);
if (arg == null) {
  return null;
String granularity = DruidExpressions.toDruidExpression(call.getOperands().get(1), rowType, query);
if (granularity == null) {
  return null;
// 'unit' is presumably derived from 'granularity' in missing text — verify.
DruidExpressions.applyTimestampFloor(arg, unit, "", tz),
    YYYY_MM_DD, tz);
/**
 * Walks down a single-input chain of rel nodes looking for the table alias.
 * Handles Hive scans, Druid queries and JDBC converters; stops (returns null)
 * at a Project, at a fan-in node, or when no scan is found.
 *
 * @param rel starting rel node; may be null
 * @return the table alias of the underlying scan, or null if none is found
 */
private static String getTblAlias(RelNode rel) {
  if (rel == null) {
    return null;
  }
  if (rel instanceof HiveTableScan) {
    return ((HiveTableScan) rel).getTableAlias();
  }
  if (rel instanceof DruidQuery) {
    // A Druid query wraps the actual Hive scan; look through it.
    final DruidQuery dq = (DruidQuery) rel;
    return ((HiveTableScan) dq.getTableScan()).getTableAlias();
  }
  if (rel instanceof HiveJdbcConverter) {
    // JDBC converters expose the wrapped Hive scan directly.
    final HiveJdbcConverter conv = (HiveJdbcConverter) rel;
    return conv.getTableScan().getHiveTableScan().getTableAlias();
  }
  if (rel instanceof Project) {
    // A projection may rename/reorder columns, so the alias no longer applies.
    return null;
  }
  return rel.getInputs().size() == 1 ? getTblAlias(rel.getInput(0)) : null;
}
// NOTE(review): two orphaned statements — the enclosing method is not visible.
// Matches the opening of project(...): a check that every field of the Druid
// query is used and no extra fields are requested.
final int fieldCount = dq.getRowType().getFieldCount();
if (fieldsUsed.equals(ImmutableBitSet.range(fieldCount)) && extraFields.isEmpty()) {