public static KettleDatabaseBatchException createKettleDatabaseBatchException( String message, SQLException ex ) { KettleDatabaseBatchException kdbe = new KettleDatabaseBatchException( message, ex ); if ( ex instanceof BatchUpdateException ) { kdbe.setUpdateCounts( ( (BatchUpdateException) ex ).getUpdateCounts() ); } else { // Null update count forces rollback of batch kdbe.setUpdateCounts( null ); } List<Exception> exceptions = new ArrayList<Exception>(); SQLException nextException = ex.getNextException(); SQLException oldException = null; // This construction is specifically done for some JDBC drivers, these // drivers // always return the same exception on getNextException() (and thus go // into an infinite loop). // So it's not "equals" but != (comments from Sven Boden). while ( ( nextException != null ) && ( oldException != nextException ) ) { exceptions.add( nextException ); oldException = nextException; nextException = nextException.getNextException(); } kdbe.setExceptionsList( exceptions ); return kdbe; }
int[] updCntsOnErr = batchCause.getUpdateCounts();
/** * @throws SQLException If failed. */ @Test public void testHeterogeneousBatchException() throws SQLException { stmt.addBatch("insert into Person (_key, id, firstName, lastName, age) values ('p0', 0, 'Name0', 'Lastname0', 10)"); stmt.addBatch("insert into Person (_key, id, firstName, lastName, age) values ('p1', 1, 'Name1', 'Lastname1', 20), ('p2', 2, 'Name2', 'Lastname2', 30)"); stmt.addBatch("merge into Person (_key, id, firstName, lastName, age) values ('p3', 3, 'Name3', 'Lastname3', 40)"); stmt.addBatch("update Person set id = 'FAIL' where age >= 30"); // Fail. stmt.addBatch("merge into Person (_key, id, firstName, lastName, age) values ('p0', 2, 'Name2', 'Lastname2', 50)"); stmt.addBatch("delete from Person where FAIL <= 40"); // Fail. try { stmt.executeBatch(); fail("BatchUpdateException must be thrown"); } catch (BatchUpdateException e) { int[] updCnts = e.getUpdateCounts(); if (!e.getMessage().contains("Value conversion failed")) { log.error("Invalid exception: ", e); fail(); } assertEquals("Invalid update counts size", 6, updCnts.length); assertArrayEquals("Invalid update count", new int[] {1, 2, 1, Statement.EXECUTE_FAILED, 1, Statement.EXECUTE_FAILED}, updCnts); } }
int[] updCnts = e.getUpdateCounts();
int[] updCnts = e.getUpdateCounts();
/**
 * Test error code for the case when error is caused on batch execution.
 * @throws SQLException if failed.
 */
@Test
public void testBatchUpdateException() throws SQLException {
    try (final Connection connection = getConnection()) {
        try (Statement statement = connection.createStatement()) {
            statement.executeUpdate("CREATE TABLE test (id int primary key, val varchar)");

            statement.addBatch("insert into test (id, val) values (1, 'val1')");
            statement.addBatch("insert into test (id, val) values (2, 'val2')");
            // Deliberately broken statement: columns id1/val1 do not exist.
            statement.addBatch("insert into test (id1, val1) values (3, 'val3')");

            statement.executeBatch();

            fail("BatchUpdateException is expected");
        }
        catch (BatchUpdateException e) {
            int[] cnts = e.getUpdateCounts();

            // Two successful inserts followed by the failed one.
            assertEquals(3, cnts.length);
            assertArrayEquals("", new int[] {1, 1, Statement.EXECUTE_FAILED}, e.getUpdateCounts());

            assertEquals("42000", e.getSQLState());

            String msg = e.getMessage();

            assertTrue("Unexpected error message: " + msg,
                msg != null && msg.contains("Failed to parse query. Column \"ID1\" not found"));
        }
    }
}
int[] rowCounts = be.getUpdateCounts();
result = e.getUpdateCounts(); if (result == null) { throw new PersistenceException(e);
int[] updCnts = e.getUpdateCounts();
int[] updCnts = e.getUpdateCounts();
/** * @throws SQLException if failed. */ @Test public void testSingleItemBatchError() throws SQLException { formBatch(1, 2); prepStmt.executeBatch(); formBatch(1, 2); // Duplicate key BatchUpdateException reason = (BatchUpdateException) GridTestUtils.assertThrows(log, new Callable<Object>() { @Override public Object call() throws Exception { return prepStmt.executeBatch(); } }, BatchUpdateException.class, "Failed to INSERT some keys because they are already in cache"); // Check update counts in the exception. assertTrue(F.isEmpty(reason.getUpdateCounts())); }
final int[] updateCounts = ((BatchUpdateException) e).getUpdateCounts(); final List<FlowFile> batchFlowFiles = enclosure.getFlowFiles();
int[] updCnts = e.getUpdateCounts();
int[] updCnts = e.getUpdateCounts();
int[] updCnts = e.getUpdateCounts();
int[] updCnts = e.getUpdateCounts();
/** * @throws SQLException If failed. */ @Test public void testErrorAmidstBatch() throws SQLException { BatchUpdateException reason = (BatchUpdateException) GridTestUtils.assertThrows(log, new Callable<Object>() { @Override public Object call() throws Exception { try (Statement stmt = conn.createStatement()) { stmt.addBatch("INSERT INTO Person(_key, id, firstName, lastName, age, data) " + "VALUES ('p1', 0, 'J', 'W', 250, RAWTOHEX('W'))"); stmt.addBatch("UPDATE Person SET id = 3, firstName = 'Mike', lastName = 'Green', " + "age = 40, data = RAWTOHEX('Green') WHERE _key = 'p3'"); stmt.addBatch("SELECT id FROM Person WHERE _key = 'p1'"); return stmt.executeBatch(); } } }, BatchUpdateException.class, "Given statement type does not match that declared by JDBC driver"); // Check update counts in the exception. int[] counts = reason.getUpdateCounts(); assertEquals(2, counts.length); assertEquals(1, counts[0]); assertEquals(0, counts[1]); }
/** * @throws SQLException if failed. */ @Test public void testErrorAmidstBatch() throws SQLException { formBatch(1, 2); formBatch(3, 1); // Duplicate key BatchUpdateException reason = (BatchUpdateException) GridTestUtils.assertThrows(log, new Callable<Object>() { @Override public Object call() throws Exception { return prepStmt.executeBatch(); } }, BatchUpdateException.class, "Failed to INSERT some keys because they are already in cache"); // Check update counts in the exception. int[] counts = reason.getUpdateCounts(); assertNotNull(counts); assertEquals(1, counts.length); assertEquals(2, counts[0]); }
int[] updateCounts = be.getUpdateCounts();
int[] updateCounts = be.getUpdateCounts();