// NOTE(review): fragment — the enclosing method's signature and the end of this
// if-block lie outside the visible chunk; code left byte-identical.
// Builds a Calcite JDBC schema directly from raw connection parameters and
// resolves one table, failing fast when the table does not exist.
DataSource ds = JdbcSchema.dataSource(url, driver, user, pswd);
SqlDialect jdbcDialect = JdbcSchema.createDialect(SqlDialectFactoryImpl.INSTANCE, ds);
// Convention built with a null expression; dataBaseType serves as its name.
JdbcConvention jc = JdbcConvention.of(jdbcDialect, null, dataBaseType);
// Null catalog/schema: let the driver use its defaults.
JdbcSchema schema = new JdbcSchema(ds, jc.dialect, jc, null/*catalog */, null/*schema */);
JdbcTable jt = (JdbcTable) schema.getTable(tableName);
if (jt == null) {
  throw new SemanticException("Table " + tableName + " was not found in the database");
/**
 * Delegates function lookup to the wrapped {@code inner} schema.
 *
 * @param name name of the function(s) to look up
 * @return the functions the inner schema registers under {@code name}
 */
@Override public Collection<Function> getFunctions(String name) {
  return inner.getFunctions(name);
}
// NOTE(review): fragment — the method signature and the opening of this
// if/else are outside the visible chunk; code left byte-identical.
// Reads credentials from the operand map (values may be null if absent).
final String jdbcUser = (String) operand.get("jdbcUser");
final String jdbcPassword = (String) operand.get("jdbcPassword");
dataSource = dataSource(jdbcUrl, jdbcDriver, jdbcUser, jdbcPassword);
// No dialect-factory class configured: use the default-dialect overload.
return JdbcSchema.create(
    parentSchema, name, dataSource, jdbcCatalog, jdbcSchema);
} else {
  // A dialect-factory class name was configured; instantiate it through
  // Avatica's plugin loader and pass it along.
  SqlDialectFactory factory = AvaticaUtils.instantiatePlugin(
      SqlDialectFactory.class, sqlDialectFactory);
  return JdbcSchema.create(
      parentSchema, name, dataSource, factory, jdbcCatalog, jdbcSchema);
/**
 * Creates a JDBC schema attached to {@code parentSchema}.
 *
 * @param parentSchema parent schema the new schema hangs under
 * @param name name of the new schema
 * @param dataSource source of JDBC connections
 * @param dialectFactory factory that derives the SQL dialect from the data source
 * @param catalog catalog name, or null
 * @param schema schema name, or null
 * @return the newly constructed {@code JdbcSchema}
 */
public static JdbcSchema create(
    SchemaPlus parentSchema,
    String name,
    DataSource dataSource,
    SqlDialectFactory dialectFactory,
    String catalog,
    String schema) {
  // Probe the data source to pick the matching SQL dialect.
  SqlDialect dialect = createDialect(dialectFactory, dataSource);
  // Expression that locates this schema as a sub-schema of its parent.
  Expression expression =
      Schemas.subSchemaExpression(parentSchema, name, JdbcSchema.class);
  JdbcConvention convention = JdbcConvention.of(dialect, expression, name);
  return new JdbcSchema(dataSource, dialect, convention, catalog, schema);
}
/**
 * Factory entry point: builds a schema by delegating directly to
 * {@link JdbcSchema#create(SchemaPlus, String, Map)}.
 *
 * @param parentSchema parent schema to attach to
 * @param name name of the schema being created
 * @param operand configuration map (JDBC url, driver, credentials, ...)
 * @return the JDBC-backed schema
 */
public Schema create(
    SchemaPlus parentSchema,
    String name,
    Map<String, Object> operand) {
  return JdbcSchema.create(parentSchema, name, operand);
}
}
/**
 * Reads the row type of a table from the database's metadata.
 *
 * @param catalogName catalog containing the table, or null
 * @param schemaName schema containing the table, or null
 * @param tableName name of the table
 * @return a prototype of the table's row type
 * @throws SQLException if the connection or metadata read fails
 */
RelProtoDataType getRelDataType(String catalogName, String schemaName,
    String tableName) throws SQLException {
  Connection conn = null;
  try {
    conn = dataSource.getConnection();
    final DatabaseMetaData metaData = conn.getMetaData();
    return getRelDataType(metaData, catalogName, schemaName, tableName);
  } finally {
    // Helper tolerates a null connection if getConnection() itself threw.
    close(conn, null, null);
  }
}
/**
 * Creates a capitalizing wrapper around a standard {@link JdbcSchema}.
 *
 * @param parentSchemaPath path of the parent schema (passed to the superclass)
 * @param name name of this schema
 * @param dataSource source of JDBC connections for the wrapped schema
 * @param dialect SQL dialect of the backing database
 * @param convention calling convention for generated relational expressions
 * @param catalog catalog name, or null
 * @param schema schema name, or null
 */
public CapitalizingJdbcSchema(List<String> parentSchemaPath, String name, DataSource dataSource, SqlDialect dialect, JdbcConvention convention, String catalog, String schema) {
  super(parentSchemaPath, name);
  // All table/function lookups are delegated to this wrapped schema.
  inner = new JdbcSchema(dataSource, dialect, convention, catalog, schema);
}
/**
 * Creates the JDBC storage plugin: configures a pooled data source from the
 * storage config, then derives the Calcite dialect and Drill convention.
 *
 * @param config plugin configuration (driver, url, optional credentials)
 * @param context Drillbit context passed to the superclass
 * @param name name of this storage plugin instance
 */
public JdbcStoragePlugin(JdbcStorageConfig config, DrillbitContext context, String name) {
  super(context, name);
  this.config = config;
  // Local named 'ds' (not 'source') to avoid shadowing the field.
  BasicDataSource ds = new BasicDataSource();
  ds.setDriverClassName(config.getDriver());
  ds.setUrl(config.getUrl());
  // Credentials are optional; only set them when configured.
  String username = config.getUsername();
  if (username != null) {
    ds.setUsername(username);
  }
  String password = config.getPassword();
  if (password != null) {
    ds.setPassword(password);
  }
  this.source = ds;
  this.dialect = JdbcSchema.createDialect(SqlDialectFactoryImpl.INSTANCE, ds);
  this.convention = new DrillJdbcConvention(dialect, name);
}
/**
 * Creates a sql converter using the given database and datasource.
 * TODO See https://github.com/yahoo/fili/issues/511
 *
 * @param url  The url location of the database to connect to.
 * @param driver  The driver (i.e. "org.h2.Driver") to connect with.
 * @param username  The username to connect to the database.
 * @param password  The password to connect to the database.
 * @param objectMapper  The mapper for all JSON processing.
 *
 * @throws SQLException if can't read from database.
 */
public DefaultSqlBackedClient(
        String url,
        String driver,
        String username,
        String password,
        ObjectMapper objectMapper
) throws SQLException {
    // Build a Calcite-managed DataSource from the raw connection parameters.
    DataSource dataSource = JdbcSchema.dataSource(url, driver, username, password);
    calciteHelper = new CalciteHelper(dataSource);
    // Converter translates Druid queries into SQL against this data source.
    druidQueryToSqlConverter = new DruidQueryToSqlConverter(calciteHelper);
    jsonWriter = objectMapper;
}
/**
 * Returns the row type of the underlying JDBC table, reading it from the
 * database on first use and caching the prototype thereafter.
 *
 * @param typeFactory factory used to materialize the cached prototype
 * @return the table's row type
 * @throws RuntimeException wrapping any {@link SQLException} from the metadata read
 */
public RelDataType getRowType(RelDataTypeFactory typeFactory) {
  // Lazily populate the cached prototype on first access.
  if (protoRowType == null) {
    try {
      protoRowType =
          jdbcSchema.getRelDataType(jdbcCatalogName, jdbcSchemaName, jdbcTableName);
    } catch (SQLException e) {
      throw new RuntimeException(
          "Exception while reading definition of table '" + jdbcTableName + "'", e);
    }
  }
  return protoRowType.apply(typeFactory);
}
private Map<String, JdbcTable> computeTables() { // 1: Get all tables from the DB as usual Set<String> rawTableNames = super.getTableNames(); // Forces computeTables Map<String, JdbcTable> tables = new HashMap<>(); for (String rawTableName : rawTableNames) { tables.put(rawTableName, (JdbcTable) super.getTable(rawTableName)); } // 2: Filter out any table/view which has a name in the journalled list tables.keySet().removeAll(journalledTableKeys.keySet()); // 3: For each table in the journalled list, generate a fake table from its journal for (String virtualName : journalledTableKeys.keySet()) { JdbcTable journalTable = tables.get(journalNameFor(virtualName)); if (journalTable != null) { tables.put(virtualName, new JournalledJdbcTable( virtualName, this, journalTable, journalledTableKeys.get(virtualName) )); } } return tables; }
@Override public Table getTable(String name) { Table table = inner.getTable(name); if (table != null) { return table; } if (!areTableNamesCaseSensitive()) { // Oracle and H2 changes unquoted identifiers to uppercase. table = inner.getTable(name.toUpperCase()); if (table != null) { return table; } // Postgres changes unquoted identifiers to lowercase. return inner.getTable(name.toLowerCase()); } // no table was found. return null; }
/**
 * Returns the cached table map, computing it on first use.
 *
 * @param force when true, recompute even if a cached map exists
 * @return the (possibly freshly computed) immutable table map
 */
private synchronized ImmutableMap<String, JdbcTable> getTableMap(boolean force) {
  // Both operands are side-effect free, so the commuted order is equivalent.
  if (tableMap == null || force) {
    tableMap = computeTables();
  }
  return tableMap;
}
// NOTE(review): fragment — the try block, catch header, and closing braces are
// outside the visible chunk; code left byte-identical.
"Exception while reading tables", e);
} finally {
  // Helper tolerates null arguments; the statement slot is not used here.
  close(connection, null, resultSet);
// NOTE(review): fragment — the method signature and the opening of this
// if/else are outside the visible chunk; code left byte-identical.
// Reads credentials from the operand map (values may be null if absent).
final String jdbcUser = (String) operand.get("jdbcUser");
final String jdbcPassword = (String) operand.get("jdbcPassword");
dataSource = dataSource(jdbcUrl, jdbcDriver, jdbcUser, jdbcPassword);
// No dialect-factory class configured: use the default-dialect overload.
return JdbcSchema.create(
    parentSchema, name, dataSource, jdbcCatalog, jdbcSchema);
} else {
  // A dialect-factory class name was configured; instantiate it through
  // Avatica's plugin loader and pass it along.
  SqlDialectFactory factory = AvaticaUtils.instantiatePlugin(
      SqlDialectFactory.class, sqlDialectFactory);
  return JdbcSchema.create(
      parentSchema, name, dataSource, factory, jdbcCatalog, jdbcSchema);
/**
 * Creates a JDBC schema attached to {@code parentSchema}.
 *
 * @param parentSchema parent schema the new schema hangs under
 * @param name name of the new schema
 * @param dataSource source of JDBC connections
 * @param dialectFactory factory that derives the SQL dialect from the data source
 * @param catalog catalog name, or null
 * @param schema schema name, or null
 * @return the newly constructed {@code JdbcSchema}
 */
public static JdbcSchema create(
    SchemaPlus parentSchema,
    String name,
    DataSource dataSource,
    SqlDialectFactory dialectFactory,
    String catalog,
    String schema) {
  // Expression that locates this schema as a sub-schema of its parent.
  final Expression expression =
      Schemas.subSchemaExpression(parentSchema, name, JdbcSchema.class);
  // Probe the data source so the dialect matches the actual database.
  final SqlDialect dialect = createDialect(dialectFactory, dataSource);
  final JdbcConvention convention = JdbcConvention.of(dialect, expression, name);
  return new JdbcSchema(dataSource, dialect, convention, catalog, schema);
}
/**
 * Factory entry point: builds a schema by delegating directly to
 * {@link JdbcSchema#create(SchemaPlus, String, Map)}.
 *
 * @param parentSchema parent schema to attach to
 * @param name name of the schema being created
 * @param operand configuration map (JDBC url, driver, credentials, ...)
 * @return the JDBC-backed schema
 */
public Schema create(
    SchemaPlus parentSchema,
    String name,
    Map<String, Object> operand) {
  return JdbcSchema.create(parentSchema, name, operand);
}
}
/**
 * Reads the row type of a table from the database's metadata.
 *
 * @param catalogName catalog containing the table, or null
 * @param schemaName schema containing the table, or null
 * @param tableName name of the table
 * @return a prototype of the table's row type
 * @throws SQLException if the connection or metadata read fails
 */
RelProtoDataType getRelDataType(String catalogName, String schemaName,
    String tableName) throws SQLException {
  Connection connection = null;
  try {
    connection = dataSource.getConnection();
    DatabaseMetaData metaData = connection.getMetaData();
    return getRelDataType(metaData, catalogName, schemaName, tableName);
  } finally {
    // Helper tolerates a null connection if getConnection() itself threw.
    close(connection, null, null);
  }
}
/**
 * Returns a copy of this schema representing a snapshot of its current state.
 * The current {@code tableMap} is handed to the copy so the snapshot does not
 * recompute tables.
 *
 * NOTE(review): the {@code version} argument is not consulted here — the
 * snapshot simply captures the schema's present state; confirm with the
 * {@code SchemaVersion} contract that this is intended.
 *
 * @param version schema version to snapshot at (currently unused)
 * @return a new {@code JdbcSchema} sharing this schema's state
 */
public Schema snapshot(SchemaVersion version) {
  return new JdbcSchema(dataSource, dialect, convention, catalog, schema, tableMap);
}
/** * Returns a suitable SQL dialect for the given data source. * * @param dataSource The data source * * @deprecated Use {@link #createDialect(SqlDialectFactory, DataSource)} instead */ @Deprecated // to be removed before 2.0 public static SqlDialect createDialect(DataSource dataSource) { return createDialect(SqlDialectFactoryImpl.INSTANCE, dataSource); }