/**
 * Builds the JDBC-backed key-value service with its connection source, dialect,
 * naming prefix, and batching knobs. The metadata table name is derived from the
 * prefix at construction time.
 */
private JdbcKeyValueService(
        Settings settings,
        SQLDialect sqlDialect,
        DataSource dataSource,
        String tablePrefix,
        int rowBatchSize,
        int batchSizeForReads,
        int batchSizeForMutations) {
    // Batching configuration.
    this.rowBatchSize = rowBatchSize;
    this.batchSizeForReads = batchSizeForReads;
    this.batchSizeForMutations = batchSizeForMutations;
    // Connection / dialect configuration.
    this.settings = settings;
    this.sqlDialect = sqlDialect;
    this.dataSource = dataSource;
    this.tablePrefix = tablePrefix;
    // All physical tables share the prefix; the metadata table is "<prefix>_metadata".
    METADATA_TABLE = table(tablePrefix + "_metadata");
}
/**
 * Creates a timestamp bound store backed by the given key-value service.
 * The backing table name is resolved through the kvs so it carries the
 * service's table prefix.
 */
private JdbcTimestampBoundStore(JdbcKeyValueService kvs) {
    this.kvs = kvs;
    TABLE = DSL.table(kvs.tableName(TIMESTAMP_TABLE));
}
/** Resolves a logical table reference to its physical jOOQ table. */
Table<Record> atlasTable(TableReference tableRef) {
    String physicalName = tableName(tableRef);
    return table(physicalName);
}
/**
 * A synonym for {@link #unnest(Collection)}.
 *
 * @see #unnest(Collection)
 */
@Support
public static Table<?> table(Collection<?> list) {
    // Materialize the collection and delegate to the array overload.
    Object[] elements = list.toArray();
    return table(elements);
}
/**
 * Name-based convenience overload: resolves {@code name} to a table and
 * delegates to the {@code Table}-based {@code straightJoin} overload.
 */
@Override
public final TableOptionalOnStep<Record> straightJoin(Name name) {
    return straightJoin(table(name));
}
}
/**
 * Produces a {@code jsonb_array_elements(...)} table expression aliased to the
 * iterator variable, for iterating a Postgres jsonb array in SQL.
 */
@Nonnull
@Override
public Table arrayElements(Field<?> iteratorVariable, Field<?> field) {
    // NOTE(review): this builds plain SQL from the rendered field. Safe only as
    // long as `field` is a trusted column/expression, never user input — confirm.
    String expression = "jsonb_array_elements(" + field.toString() + ")";
    String alias = iteratorVariable.toString();
    return DSL.table(expression).as(alias);
}
/**
 * Creates the structures cache: schema/converter wiring, the qualified
 * "structures" table, a presized bidirectional cache map, and the lock
 * guarding inserts.
 */
public AbstractStructuresCache(
        CollectionSchema colSchema, String schemaName, StructureConverter converter) {
    this.insertLock = new ReentrantLock();
    this.colSchema = colSchema;
    this.converter = converter;
    // Fully-qualified <schemaName>."structures" table.
    this.table = DSL.table(DSL.name(schemaName, "structures"));
    // Presized to avoid early rehashing; 1000 is a capacity hint, not a limit.
    this.structures = HashBiMap.create(1000);
}
/**
 * Use a previously obtained record as a new Table
 */
@Support
public static <R extends Record> Table<R> table(R record) {
    // Wrap the single record in an array and delegate to the varargs overload.
    R[] singleton = (R[]) new Record[] { record };
    return table(singleton);
}
/**
 * Custom deserialization hook: restores the transient lock and jOOQ table
 * handle, which are not part of the serialized form.
 */
private void readObject(java.io.ObjectInputStream stream)
        throws IOException, ClassNotFoundException {
    stream.defaultReadObject();
    // Rebuild transient state from the deserialized schema.
    table = DSL.table(DSL.name(colSchema.getName(), "structures"));
    insertLock = new ReentrantLock();
}
/**
 * Constructor.
 *
 * @param ds datasource
 * @param dialect SQL dialect
 * @param table table storing the preferences (train, test, etc.)
 */
public SQLPreferenceData(DataSource ds, SQLDialect dialect, String table) {
    this.dsl = DSL.using(ds, dialect);
    // Upper-case with a fixed locale: the default-locale toUpperCase() corrupts
    // identifiers under locale-sensitive casing rules (e.g. "item" -> "İTEM"
    // in the Turkish locale), yielding a wrong table name at runtime.
    this.DATA = DSL.table(name(table.toUpperCase(java.util.Locale.ROOT)));
}
// NOTE(review): truncated fragment — the INSERT column list continues past this
// line; this is the opening of the same statement seen in putBatch(...).
ctx.insertInto(table(tableName(tableRef)), field(ROW_NAME, byte[].class), field(COL_NAME, byte[].class),
/**
 * A synonym for {@link #unnest(Object[])}.
 *
 * @see #unnest(Object[])
 */
@Support
public static Table<?> table(Object[] array) {
    // Lift the array into a bind value, then unnest that value as a table.
    Field<Object[]> bound = val(array);
    return table(bound);
}
/**
 * Lazily resolves this name's table. Only a qualified name yields a table;
 * an unqualified name leaves the cache null, so the check repeats per call.
 */
@Override
public final Table<Record> getTable() {
    if (table == null && name.qualified()) {
        table = DSL.table(name.qualifier());
    }
    return table;
}
}
/**
 * Inserts a batch of (row, column, timestamp, value) cells. On a key conflict,
 * when reinserts are allowed, re-reads the conflicting cells and retries with
 * the remainder of the batch; otherwise surfaces a KeyAlreadyExistsException.
 */
private void putBatch(DSLContext ctx, TableReference tableRef, PutBatch batch, boolean allowReinserts) {
    InsertValuesStep4<Record, byte[], byte[], Long, byte[]> query =
            ctx.insertInto(table(tableName(tableRef)),
                    field(ROW_NAME, byte[].class),
                    field(COL_NAME, byte[].class),
                    field(TIMESTAMP, Long.class),
                    field(VALUE, byte[].class));
    query = batch.addValuesForInsert(query);
    try {
        query.execute();
    } catch (DataAccessException e) {
        // Presumably a unique-key violation on (row, col, ts) — TODO confirm the
        // constraint; any DataAccessException is treated as a conflict here.
        if (allowReinserts) {
            // Fetch the cells that already exist for the keys we tried to insert.
            Result<? extends Record> records = ctx
                    .select(A_ROW_NAME, A_COL_NAME, A_TIMESTAMP, A_VALUE)
                    .where(row(A_ROW_NAME, A_COL_NAME, A_TIMESTAMP).in(batch.getRowsForSelect()))
                    .from(atlasTable(tableRef).as(ATLAS_TABLE))
                    .fetch();
            if (records.isEmpty()) {
                // Nothing conflicts after all — the failure was something else; rethrow.
                throw e;
            }
            // Retry with whatever the batch deems still insertable; null means
            // every remaining cell conflicts with an identical existing value.
            PutBatch nextBatch = batch.getNextBatch(records);
            if (nextBatch != null) {
                putBatch(ctx, tableRef, nextBatch, allowReinserts);
                return;
            }
        }
        throw new KeyAlreadyExistsException("Conflict on table " + tableRef, e);
    }
}
@Override public Query getInsertStatement(Edge element) { JdbcSchema.Row row = toRow(element); if (row == null) return null; Map<Field<Object>, Object> fields = row.getFields().entrySet().stream() .collect(Collectors.toMap((entry) -> field(entry.getKey()), Map.Entry::getValue)); // return DSL.update(table(getTable())).set(fields).where(field(this.getFieldByPropertyKey(T.id.getAccessor())).eq(row.getId())); return DSL.insertInto(table(getTable())).set(fields); } }
/**
 * Deletes every record from this store's backing table, logging the count.
 * Any failure is wrapped in a PersistenceException (original cause preserved).
 */
@Override
public void clear() {
    Loggers.SQL_LOG.trace("[IS0123] SQL store: Clearing {} records", getCacheName());
    final int deletedCount;
    try {
        deletedCount = sql.deleteFrom(table(sqlRecordTransformer.getTableName())).execute();
    } catch (Exception e) {
        Loggers.SQL_LOG.error("[IS0124] {}: {}", e.getMessage(), e);
        throw new PersistenceException(e.getMessage(), e);
    }
    Loggers.SQL_LOG.info(
            "[IS0125] SQL store: Cleared {} {} records",
            deletedCount,
            sqlRecordTransformer.getTableName());
}
/**
 * Executes SELECT * FROM the routine's table expression (HSQLDB variant) and
 * stores the fetched result under the return parameter. Always returns 0.
 */
private final int executeSelectFromHSQLDB() {
    Result<?> rows = create(configuration).selectFrom(table(asField())).fetch();
    outValues.put(returnParameter, rows);
    return 0;
}
/**
 * Executes SELECT * FROM the routine's table expression and stores the fetched
 * result under the return parameter. Always returns 0.
 */
private final int executeSelectFrom() {
    Result<?> rows = create(configuration).selectFrom(table(asField())).fetch();
    results.put(returnParameter, rows);
    return 0;
}
/**
 * Use a previously obtained set of records as a new Table
 */
// The varargs array is only read (addAll), never written to or exposed, so the
// method is safe for generic varargs; the annotation suppresses heap-pollution
// warnings at call sites. Legal here because the method is static.
@SafeVarargs
@Support
public static <R extends Record> Table<R> table(R... records) {
    // No records means no field metadata to derive columns from: fall back to DUAL.
    if (records == null || records.length == 0)
        return (Table<R>) new Dual();

    // Rebuild a Result carrying the first record's configuration and fields,
    // then expose that result as a table.
    Result<R> result = new ResultImpl(configuration(records[0]), records[0].fields());
    result.addAll(Arrays.asList(records));

    return table(result);
}
/**
 * Wires a sub-query into its parent as a join: the sub-query's record table
 * (aliased) becomes the joined table, and the join condition matches the join's
 * value column against the aliased record id.
 */
private SqlSubJoin(SqlQuery parent, SqlQuery sub, SqlJoin join) {
    this.sqlQuery = sub;

    AbstractSqlDatabase database = sub.database;
    String alias = sub.recordTableAlias;

    // Aliased record table, initialized by the sub-query, is the join target.
    this.table = sub.initialize(DSL.table(DSL.name(database.recordTable.getName())).as(alias));

    // ON <join.valueField> = <alias>.<recordId> (typed as the database's UUID type).
    Field<?> recordId =
            DSL.field(DSL.name(alias, database.recordIdField.getName()), database.uuidType());
    this.on = join.valueField.eq(recordId);

    // A distinct sub-query forces DISTINCT on the parent as well.
    if (sub.needsDistinct) {
        parent.needsDistinct = true;
    }
}
}