@Override public TranslatableTable apply(final List<Object> arguments) { final RelDataType rowType; // Using an escalator here is a hack, but it's currently needed to get the row type. Ideally, some // later refactoring would make this unnecessary, since there is no actual query going out herem. final AuthenticationResult authenticationResult = escalator.createEscalatedAuthenticationResult(); try (final DruidPlanner planner = plannerFactory.createPlanner(null, authenticationResult)) { rowType = planner.plan(viewSql).rowType(); } catch (Exception e) { throw Throwables.propagate(e); } return new ViewTable( null, RelDataTypeImpl.proto(rowType), viewSql, ImmutableList.of(DruidSchema.NAME), null ); }
/**
 * Computes the digest field. This should be called in every non-abstract
 * subclass constructor once the type is fully defined.
 */
protected void computeDigest() {
  final StringBuilder builder = new StringBuilder();
  generateTypeString(builder, true);
  // Non-nullable types carry an explicit marker in their digest.
  if (!isNullable()) {
    builder.append(" NOT NULL");
  }
  digest = builder.toString();
}
// NOTE(review): incomplete fragment in this view — fills the per-depth match slots for
// fieldName, then begins scanning them; the labeled loop's body continues past this chunk.
getFieldRecurse(slots, this, 0, fieldName, caseSensitive); loop: for (Slot slot : slots) {
/** Returns the type's string form, without the NOT NULL digest suffix. */
@Override public String toString() {
  final StringBuilder buf = new StringBuilder();
  generateTypeString(buf, false);
  return buf.toString();
}
/** Returns the explicit precedence list for this type if one exists, else the default. */
public RelDataTypePrecedenceList getPrecedenceList() {
  final RelDataTypePrecedenceList explicitList =
      SqlTypeExplicitPrecedenceList.getListForType(this);
  return explicitList != null ? explicitList : super.getPrecedenceList();
}
}
/** Returns the SQL identifier for this type's name, or null when it has no SqlTypeName. */
public SqlIdentifier getSqlIdentifier() {
  final SqlTypeName typeName = getSqlTypeName();
  return typeName == null
      ? null
      : new SqlIdentifier(typeName.name(), SqlParserPos.ZERO);
}
/** Returns whether the given type belongs to the same type family as this one. */
public boolean containsType(RelDataType type) {
  // Family identity (reference equality) is the intended comparison here.
  return type.getFamily() == getFamily();
}
// NOTE(review): incomplete fragment — chooses the combined extra-field set only when the
// input row type actually carries "extra" fields; the assignment target lies outside this view.
RelDataTypeImpl.extra(inputRowType) == null ? Collections.emptySet() : combinedInputExtraFields;
/**
 * Computes the digest field. This should be called in every non-abstract
 * subclass constructor once the type is fully defined.
 */
protected void computeDigest() {
  final StringBuilder text = new StringBuilder();
  generateTypeString(text, true);
  // The digest distinguishes nullable from non-nullable variants.
  if (!isNullable()) {
    text.append(" NOT NULL");
  }
  digest = text.toString();
}
// NOTE(review): incomplete fragment in this view — fills the per-depth match slots for
// fieldName, then begins scanning them; the labeled loop's body continues past this chunk.
getFieldRecurse(slots, this, 0, fieldName, caseSensitive); loop: for (Slot slot : slots) {
/** Renders the type as a string; excludes the nullability suffix used by the digest. */
@Override public String toString() {
  final StringBuilder description = new StringBuilder();
  generateTypeString(description, false);
  return description.toString();
}
/** Prefers a type-specific explicit precedence list; falls back to the superclass default. */
public RelDataTypePrecedenceList getPrecedenceList() {
  final RelDataTypePrecedenceList forType =
      SqlTypeExplicitPrecedenceList.getListForType(this);
  if (forType == null) {
    return super.getPrecedenceList();
  }
  return forType;
}
}
/** Builds a SqlIdentifier from this type's SqlTypeName; null when the name is absent. */
public SqlIdentifier getSqlIdentifier() {
  final SqlTypeName name = getSqlTypeName();
  if (name == null) {
    return null;
  }
  return new SqlIdentifier(name.name(), SqlParserPos.ZERO);
}
/** True when both types share the same type family (compared by identity). */
public boolean containsType(RelDataType type) {
  final RelDataTypeFamily ours = getFamily();
  return ours == type.getFamily();
}
// NOTE(review): incomplete fragment — chooses the combined extra-field set only when the
// input row type actually carries "extra" fields; the assignment target lies outside this view.
RelDataTypeImpl.extra(inputRowType) == null ? Collections.emptySet() : combinedInputExtraFields;
// NOTE(review): incomplete fragment — constructs a DruidTable over the given dataSource/rowType
// and starts wrapping it in a HiveTableScan; the enclosing statements begin and end outside
// this view. The two address arguments presumably name the broker — TODO confirm at call site.
conf, new HashMap<>(), new HashMap<>(), new AtomicInteger()); DruidTable druidTable = new DruidTable(new DruidSchema(address, address, false), dataSource, RelDataTypeImpl.proto(rowType), metrics, DruidTable.DEFAULT_TIMESTAMP_COLUMN, intervals, null, null); final TableScan scan = new HiveTableScan(cluster, cluster.traitSetOf(HiveRelNode.CONVENTION),
private static void getFieldRecurse(List<Slot> slots, RelDataType type, int depth, String fieldName, boolean caseSensitive) { while (slots.size() <= depth) { slots.add(new Slot()); } final Slot slot = slots.get(depth); for (RelDataTypeField field : type.getFieldList()) { if (Util.matches(caseSensitive, field.getName(), fieldName)) { slot.count++; slot.field = field; } } // No point looking to depth + 1 if there is a hit at depth. if (slot.count == 0) { for (RelDataTypeField field : type.getFieldList()) { if (field.getType().isStruct()) { getFieldRecurse(slots, field.getType(), depth + 1, fieldName, caseSensitive); } } } }
// NOTE(review): incomplete fragment — tail of a DruidTable constructor call followed by a
// HiveTableScan built over the materialized view's table name; the statement's head and
// remaining scan arguments are outside this view.
dataSource, RelDataTypeImpl.proto(rowType), metrics, DruidTable.DEFAULT_TIMESTAMP_COLUMN, intervals); final TableScan scan = new HiveTableScan(cluster, cluster.traitSetOf(HiveRelNode.CONVENTION), optTable, viewTable.getTableName(), null, false, false);
private static void getFieldRecurse(List<Slot> slots, RelDataType type, int depth, String fieldName, boolean caseSensitive) { while (slots.size() <= depth) { slots.add(new Slot()); } final Slot slot = slots.get(depth); for (RelDataTypeField field : type.getFieldList()) { if (Util.matches(caseSensitive, field.getName(), fieldName)) { slot.count++; slot.field = field; } } // No point looking to depth + 1 if there is a hit at depth. if (slot.count == 0) { for (RelDataTypeField field : type.getFieldList()) { if (field.getType().isStruct()) { getFieldRecurse(slots, field.getType(), depth + 1, fieldName, caseSensitive); } } } }
// NOTE(review): incomplete fragment — tail of a DruidTable constructor call followed by a
// HiveTableScan that falls back to the table's metadata name when no alias is given;
// the statement's head and remaining scan arguments are outside this view.
dataSource, RelDataTypeImpl.proto(rowType), metrics, DruidTable.DEFAULT_TIMESTAMP_COLUMN, intervals); final TableScan scan = new HiveTableScan(cluster, cluster.traitSetOf(HiveRelNode.CONVENTION), optTable, null == tableAlias ? tabMetaData.getTableName() : tableAlias,