/**
 * Resolves the configured sync key names into {@link Column} objects on the given table.
 * Any configured name that does not match a column on the table is logged and skipped.
 * Falls back to the table's primary key columns when no sync keys are configured or when
 * none of the configured names resolve to a real column.
 *
 * @param table the table whose columns are matched against the configured sync key names
 * @return the resolved sync key columns, or the table's primary key columns as a fallback
 */
public Column[] getSyncKeysColumnsForTable(Table table) {
    List<String> syncKeys = getSyncKeyNamesAsList();
    if (syncKeys.isEmpty()) {
        return table.getPrimaryKeyColumns();
    }
    List<Column> columns = new ArrayList<Column>(syncKeys.size());
    for (String syncKey : syncKeys) {
        Column col = table.getColumnWithName(syncKey);
        if (col != null) {
            columns.add(col);
        } else {
            log.error("The sync key column '{}' was specified for the '{}' trigger but was not found in the table",
                    syncKey, triggerId);
        }
    }
    // If every configured sync key failed to resolve, fall back to the primary key
    // so the caller always gets a usable key set.
    return columns.isEmpty() ? table.getPrimaryKeyColumns()
            : columns.toArray(new Column[columns.size()]);
}
ArrayList<Column> lookupColumns = new ArrayList<Column>( changedColumnsList); Column[] pks = targetTable.getPrimaryKeyColumns(); for (Column column : pks) { conflict.getDetectExpression()); pks = targetTable.getPrimaryKeyColumns(); for (Column column : pks) {
@Override protected Table readTable(Connection connection, DatabaseMetaDataWrapper metaData, Map<String, Object> values) throws SQLException { // TODO This needs some more work, since table names can be case // sensitive or lowercase // depending on the platform (really cute). // See http://dev.mysql.com/doc/refman/4.1/en/name-case-sensitivity.html // for more info. Table table = super.readTable(connection, metaData, values); if (table != null) { determineAutoIncrementFromResultSetMetaData(connection, table, table.getPrimaryKeyColumns()); } return table; }
conflict.getDetectExpression()); Column[] pks = targetTable.getPrimaryKeyColumns(); for (Column column : pks) {
@Override protected Table readTable(Connection connection, DatabaseMetaDataWrapper metaData, Map<String, Object> values) throws SQLException { Table table = super.readTable(connection, metaData, values); if (table != null) { // For at least version 1.7.2 we have to determine the // auto-increment columns from a result set meta data because the // database does not put this info into the database metadata // Since Hsqldb only allows IDENTITY for primary key columns, we // restrict our search to those columns determineAutoIncrementFromResultSetMetaData(connection, table, table.getPrimaryKeyColumns()); } return table; }
public CsvData mapRow(Row row) { return new CsvData(DataEventType.INSERT, toStringData(row, table.getPrimaryKeyColumns()), toStringData(row, table.getColumns())); } });
/**
 * Removes any unique index on the table that is really the database's internal
 * representation of the primary key, so it is not reported as a regular index.
 */
protected void removeInternalPrimaryKeyIndex(Connection connection,
        DatabaseMetaDataWrapper metaData, Table table) throws SQLException {
    // Collect the primary key column names once, for matching candidate indices.
    List<String> pkColumnNames = new ArrayList<String>();
    for (Column pkColumn : table.getPrimaryKeyColumns()) {
        pkColumnNames.add(pkColumn.getName());
    }
    // Walk the indices, only advancing the cursor when nothing was removed,
    // because removing an index shifts all subsequent positions down by one.
    int idx = 0;
    while (idx < table.getIndexCount()) {
        IIndex index = table.getIndex(idx);
        boolean internalPkIndex = index.isUnique()
                && matches(index, pkColumnNames)
                && isInternalPrimaryKeyIndex(connection, metaData, table, index);
        if (internalPkIndex) {
            table.removeIndex(idx);
        } else {
            idx++;
        }
    }
}
/** * Select a random row from the table in the connected database. Return null if there are no rows. * * TODO: Cache rows. * * @param sqlTemplate * @param table The table to select a row from. * @return A random row from the table. Null if there are no rows. */ private Row selectRandomRow(Table table) { Row row = null; // Select all rows and return the primary key columns. String sql = platform.createDmlStatement(DmlType.SELECT_ALL, table.getCatalog(), table.getSchema(), table.getName(), table.getPrimaryKeyColumns(), table.getColumns(), null).getSql(); final List<Row> rows = new ArrayList<Row>(); platform.getSqlTemplate().query(sql, RANDOM_SELECT_SIZE, new ISqlRowMapper<Object>() { public Object mapRow(Row row) { rows.add(row); return Boolean.TRUE; } }, null, null); if (rows.size() != 0) { int rowNum = getRand().nextInt(rows.size()); row = rows.get(rowNum); } return row; }
/**
 * Begins writing the given table to the CSV protocol stream. Catalog/schema
 * lines are only emitted in non-backwards-compatible mode, and the KEYS/COLUMNS
 * header lines are only re-emitted when the table's structural key differs from
 * the last time this table was written.
 *
 * @param table the table about to be written
 * @return true if the table was started; false when the batch is being ignored
 */
public boolean start(Table table) {
    if (batch.isIgnored()) {
        return false;
    }
    this.table = table;
    if (!backwardsCompatible) {
        String catalogName = table.getCatalog();
        println(CsvConstants.CATALOG, StringUtils.isNotBlank(catalogName) ? catalogName : "");
        String schemaName = table.getSchema();
        println(CsvConstants.SCHEMA, StringUtils.isNotBlank(schemaName) ? schemaName : "");
    }
    String tableKey = table.getTableKey();
    String fullName = table.getFullyQualifiedTableName();
    String lastKeyForTable = processedTables.get(fullName);
    println(CsvConstants.TABLE, table.getName());
    if (!tableKey.equals(lastKeyForTable)) {
        // Structure changed (or first time seen): re-send key and column headers.
        println(CsvConstants.KEYS, table.getPrimaryKeyColumns());
        println(CsvConstants.COLUMNS, table.getColumns());
        this.processedTables.put(fullName, tableKey);
    }
    return true;
}
String[] pkData = data.getPkData(targetTable); Object[] objectValues = writer.getPlatform().getObjectValues( writer.getBatch().getBinaryEncoding(), pkData, targetTable.getPrimaryKeyColumns()); DmlStatement stmt = writer.getPlatform().createDmlStatement(DmlType.FROM, targetTable); Column column = targetTable.getColumnWithName(columnName);
/**
 * Translates one CSV data event into a SQL statement and appends it to the
 * payload list for the current batch. Events that produce no SQL are ignored.
 *
 * @param data the change event to translate
 */
public void write(CsvData data) {
    String sql = null;
    DataEventType eventType = data.getDataEventType();
    switch (eventType) {
        case INSERT:
            sql = buildSql(useUpsertStatements ? DmlType.UPSERT : DmlType.INSERT,
                    data.getParsedData(CsvData.ROW_DATA), currentTable.getColumns());
            break;
        case UPDATE:
            sql = buildSql(useUpsertStatements ? DmlType.UPSERT : DmlType.UPDATE,
                    data.getParsedData(CsvData.ROW_DATA), currentTable.getColumns());
            break;
        case DELETE:
            // Deletes are keyed by primary key data rather than full row data.
            sql = buildSql(DmlType.DELETE, data.getParsedData(CsvData.PK_DATA),
                    currentTable.getPrimaryKeyColumns());
            break;
        case SQL:
            // Pass-through: the event payload is already a SQL statement.
            sql = data.getParsedData(CsvData.ROW_DATA)[0];
            break;
        case CREATE:
            IDdlBuilder builder = DdlBuilderFactory.createDdlBuilder(targetDatabaseName);
            sql = builder.createTable(currentTable);
            break;
        default:
            break;
    }
    if (sql != null) {
        this.payloadMap.get(this.currentBatch).add(sql);
    }
}
Map<String, Object> columnDataMap = CollectionUtils .toMap(columnNames, objectValues); Column[] pkColumns = table.getPrimaryKeyColumns(); ISqlTemplate sqlTemplate = platform.getSqlTemplate(); Object[] args = new Object[pkColumns.length];
/**
 * Builds the initial-load SQL that extracts only the primary key columns of a
 * table as CSV, by substituting the placeholders of the initial-load template.
 *
 * @param trigger        the trigger whose settings (e.g. LOB capture) shape the SQL
 * @param triggerHistory history record used to resolve the source table prefix; may be null
 * @param table          the table being extracted
 * @param channel        the channel the extraction runs on
 * @param whereClause    extra filtering appended via the template's whereClause slot
 * @return the fully substituted SQL statement
 */
public String createCsvPrimaryKeySql(Trigger trigger, TriggerHistory triggerHistory,
        Table table, Channel channel, String whereClause) {
    String sql = sqlTemplates.get(INITIAL_LOAD_SQL_TEMPLATE);
    Column[] pkColumns = table.getPrimaryKeyColumns();
    String columnsText = buildColumnString(symmetricDialect.getInitialLoadTableAlias(),
            symmetricDialect.getInitialLoadTableAlias(), "", pkColumns,
            DataEventType.INSERT, false, channel, trigger).toString();
    sql = FormatUtils.replace("columns", columnsText, sql);
    // Oracle needs a to_clob('') prefix when LOB capture is enabled.
    sql = FormatUtils.replace("oracleToClob",
            trigger.isUseCaptureLobs() ? "to_clob('')||" : "", sql);
    sql = FormatUtils.replace("tableName",
            SymmetricUtils.quote(symmetricDialect, table.getName()), sql);
    sql = FormatUtils.replace("schemaName",
            triggerHistory == null ? getSourceTablePrefix(trigger)
                    : getSourceTablePrefix(triggerHistory), sql);
    sql = FormatUtils.replace("whereClause", whereClause, sql);
    // Tables without a primary key fall back to using all columns as the key.
    sql = FormatUtils.replace("primaryKeyWhereString",
            getPrimaryKeyWhereString(symmetricDialect.getInitialLoadTableAlias(),
                    table.hasPrimaryKey() ? table.getPrimaryKeyColumns() : table.getColumns()),
            sql);
    return sql;
}
/**
 * Conflict detector that compares a numeric version column between the row
 * already in the target database and the incoming row.
 *
 * @param conflict the conflict definition; its detect expression names the version column
 * @param writer   provides the target/source tables, platform, batch and transaction
 * @param data     the incoming change data
 * @return true when no existing version was found (null) or when the incoming
 *         row's version is strictly greater than the existing one
 */
protected boolean isVersionNewer(Conflict conflict, DatabaseWriter writer, CsvData data) {
    // The detect expression carries the name of the version column to compare.
    String columnName = conflict.getDetectExpression();
    Table targetTable = writer.getTargetTable();
    Table sourceTable = writer.getSourceTable();
    // Convert the PK values to typed objects so they can be bound as query args.
    String[] pkData = data.getPkData(targetTable);
    Object[] objectValues = writer.getPlatform().getObjectValues(
            writer.getBatch().getBinaryEncoding(), pkData,
            targetTable.getPrimaryKeyColumns());
    // Build a SELECT of just the version column for the target row.
    DmlStatement stmt = writer.getPlatform().createDmlStatement(DmlType.FROM, targetTable);
    String sql = stmt.getColumnsSql(new Column[] { targetTable.getColumnWithName(columnName) });
    Long existingVersion = writer.getTransaction()
            .queryForObject(sql, Long.class, objectValues);
    if (existingVersion == null) {
        // No existing row (or a null version value): treat incoming data as newer.
        return true;
    } else {
        Map<String, String> newData = data.toColumnNameValuePairs(
                sourceTable.getColumnNames(), CsvData.ROW_DATA);
        // NOTE(review): Long.valueOf throws NPE/NumberFormatException if the
        // version column is missing or non-numeric in the incoming row --
        // confirm upstream guarantees the column is always present and numeric.
        Long loadingVersion = Long.valueOf(newData.get(columnName));
        return loadingVersion > existingVersion;
    }
}
table.getPrimaryKeyColumns(), table.getNonPrimaryKeyColumns(), null); Column[] columns = updStatement.getMetaData(); Object[] values = new Object[columns.length];
protected String getCurData(ISqlTransaction transaction) { String curVal = null; if (writerSettings.isSaveCurrentValueOnError()) { String[] keyNames = Table.getArrayColumns(context.getTable().getPrimaryKeyColumns()); String[] columnNames = Table.getArrayColumns(context.getTable().getColumns()); .toMap(columnNames, objectValues); Column[] pkColumns = targetTable.getPrimaryKeyColumns(); Object[] args = new Object[pkColumns.length]; for (int i = 0; i < pkColumns.length; i++) {
.getTable().getColumns())); error.setPrimaryKeyColumnNames(Table.getCommaDeliminatedColumns(context .getTable().getPrimaryKeyColumns())); error.setCsvData(context.getData()); error.setCurData((String)context.get(DatabaseWriter.CUR_DATA));
/**
 * Builds the initial-load SQL that extracts a table's data as CSV, restricted
 * to the columns recorded in the trigger history, by substituting the
 * placeholders of the initial-load template.
 *
 * @param trigger        the trigger whose settings (e.g. LOB capture) shape the SQL
 * @param triggerHistory history record providing the captured column/PK lists; may be null
 * @param originalTable  the full table definition to filter down
 * @param channel        the channel the extraction runs on
 * @param whereClause    extra filtering appended via the template's whereClause slot
 * @return the fully substituted SQL statement
 */
public String createCsvDataSql(Trigger trigger, TriggerHistory triggerHistory,
        Table originalTable, Channel channel, String whereClause) {
    // Restrict the table to the columns/PKs that were captured by the trigger.
    Table table = originalTable.copyAndFilterColumns(triggerHistory.getParsedColumnNames(),
            triggerHistory.getParsedPkColumnNames(), true);
    String sql = sqlTemplates.get(INITIAL_LOAD_SQL_TEMPLATE);
    Column[] selectedColumns = table.getColumns();
    String columnsText = buildColumnString(symmetricDialect.getInitialLoadTableAlias(),
            symmetricDialect.getInitialLoadTableAlias(), "", selectedColumns,
            DataEventType.INSERT, false, channel, trigger).columnString;
    sql = FormatUtils.replace("columns", columnsText, sql);
    // Oracle needs a to_clob('') prefix when LOB capture is enabled.
    sql = FormatUtils.replace("oracleToClob",
            trigger.isUseCaptureLobs() ? "to_clob('')||" : "", sql);
    sql = FormatUtils.replace("tableName",
            SymmetricUtils.quote(symmetricDialect, table.getName()), sql);
    sql = FormatUtils.replace("schemaName",
            triggerHistory == null ? getSourceTablePrefix(trigger)
                    : getSourceTablePrefix(triggerHistory), sql);
    sql = FormatUtils.replace("whereClause", whereClause, sql);
    // Tables without a primary key fall back to using all columns as the key.
    sql = FormatUtils.replace("primaryKeyWhereString",
            getPrimaryKeyWhereString(symmetricDialect.getInitialLoadTableAlias(),
                    table.hasPrimaryKey() ? table.getPrimaryKeyColumns() : table.getColumns()),
            sql);
    sql = replaceDefaultSchemaAndCatalog(sql);
    return sql;
}
"primaryKeyWhereString", getPrimaryKeyWhereString(symmetricDialect.getInitialLoadTableAlias(), table.hasPrimaryKey() ? table.getPrimaryKeyColumns() : table.getColumns()), sql);
: getSourceTablePrefix(history), ddl); Column[] primaryKeyColumns = table.getPrimaryKeyColumns(); ddl = FormatUtils.replace( "oldKeys", "primaryKeyWhereString", getPrimaryKeyWhereString(dml == DataEventType.DELETE ? oldTriggerValue : newTriggerValue, table.hasPrimaryKey() ? table.getPrimaryKeyColumns() : table.getColumns()), ddl);