/**
 * Builds a {@code BigQueryError} from its API protobuf representation.
 *
 * @param errorPb the {@code ErrorProto} received from the BigQuery service
 * @return a new {@code BigQueryError} carrying the proto's reason, location, message and debug info
 */
static BigQueryError fromPb(ErrorProto errorPb) {
  String reason = errorPb.getReason();
  String location = errorPb.getLocation();
  String message = errorPb.getMessage();
  String debugInfo = errorPb.getDebugInfo();
  return new BigQueryError(reason, location, message, debugInfo);
}
}
/**
 * Asserts that two {@code BigQueryError} instances are fully equivalent:
 * equals/hashCode/toString agree and every individual accessor matches.
 *
 * @param expected the reference error
 * @param actual the error under test
 */
private void compareBigQueryError(BigQueryError expected, BigQueryError actual) {
  // Object-level contract first.
  assertEquals(expected, actual);
  assertEquals(expected.hashCode(), actual.hashCode());
  assertEquals(expected.toString(), actual.toString());
  // Then compare field by field so a mismatch pinpoints the broken accessor.
  assertEquals(expected.getReason(), actual.getReason());
  assertEquals(expected.getLocation(), actual.getLocation());
  assertEquals(expected.getDebugInfo(), actual.getDebugInfo());
  assertEquals(expected.getMessage(), actual.getMessage());
}
}
// Tail of an anonymous Function<ErrorProto, BigQueryError> (declaration starts above this view);
// converts a proto error into a BigQueryError by delegating to the static factory.
@Override public BigQueryError apply(ErrorProto pb) { return BigQueryError.fromPb(pb); } };
/** Verifies that both the fully-populated and the partially-populated errors expose their fields. */
@Test
public void testConstructor() {
  // Fully populated error: every accessor returns the value it was built with.
  assertEquals(REASON, ERROR.getReason());
  assertEquals(LOCATION, ERROR.getLocation());
  assertEquals(DEBUG_INFO, ERROR.getDebugInfo());
  assertEquals(MESSAGE, ERROR.getMessage());
  // Incomplete error: built without debug info, which must come back as null.
  assertEquals(REASON, ERROR_INCOMPLETE.getReason());
  assertEquals(LOCATION, ERROR_INCOMPLETE.getLocation());
  assertEquals(null, ERROR_INCOMPLETE.getDebugInfo());
  assertEquals(MESSAGE, ERROR_INCOMPLETE.getMessage());
}
/** Updating a table that does not exist must fail with a "notFound" BigQueryError. */
@Test
public void testUpdateNonExistingTable() {
  TableId nonExistingId = TableId.of(DATASET, "test_update_non_existing_table");
  TableInfo tableInfo = TableInfo.of(nonExistingId, StandardTableDefinition.of(SIMPLE_SCHEMA));
  try {
    bigquery.update(tableInfo);
    fail("BigQueryException was expected");
  } catch (BigQueryException e) {
    // The service error must be present and identify the missing-table condition.
    BigQueryError error = e.getError();
    assertNotNull(error);
    assertEquals("notFound", error.getReason());
    assertNotNull(error.getMessage());
  }
}
// Fragment of a larger if/else (the condition starts above this view).
// This branch copies the job's error details onto FlowFile attributes so
// downstream processors can inspect the failure.
attributes.put(BigQueryAttributes.JOB_ERROR_MSG_ATTR, job.getStatus().getError().getMessage()); attributes.put(BigQueryAttributes.JOB_ERROR_REASON_ATTR, job.getStatus().getError().getReason()); attributes.put(BigQueryAttributes.JOB_ERROR_LOCATION_ATTR, job.getStatus().getError().getLocation()); } else {
// Failure path: log the job error, penalize the FlowFile and route it to failure.
getLogger().log(LogLevel.WARN, job.getStatus().getError().getMessage()); flowFile = session.penalize(flowFile); session.transfer(flowFile, REL_FAILURE);
// Tail of an anonymous Function<BigQueryError, ErrorProto> (declaration starts above this view);
// serializes a BigQueryError back to its proto form via toPb().
@Override public ErrorProto apply(BigQueryError error) { return error.toPb(); } };
public BigQueryException(int code, String message, BigQueryError error) { super(code, message, error != null ? error.getReason() : null, true, RETRYABLE_ERRORS); this.error = error; }
// Fragment of a test method (header outside this view): row 1 inserted cleanly,
// while row 0 carries exactly one error with the expected message.
assertNull(response.getErrorsFor(1L)); assertEquals(1, response.getErrorsFor(0L).size()); assertEquals("ErrorMessage", response.getErrorsFor(0L).get(0).getMessage());
// Fragment (enclosing method outside this view): surface the query job's error
// as an unchecked exception; BigQueryError.toString() includes reason/location/message.
throw new RuntimeException(queryJob.getStatus().getError().toString());
private boolean onlyContainsInvalidSchemaErrors(Map<Long, List<BigQueryError>> errors) { boolean invalidSchemaError = false; for (List<BigQueryError> errorList : errors.values()) { for (BigQueryError error : errorList) { if (error.getReason().equals("invalid") && error.getMessage().contains("no such field")) { invalidSchemaError = true; } else if (!error.getReason().equals("stopped")) { /* if some rows are in the old schema format, and others aren't, the old schema * formatted rows will show up as error: stopped. We still want to continue if this is * the case, because these errors don't represent a unique error if there are also * invalidSchemaErrors. */ return false; } } } // if we only saw "stopped" errors, we want to return false. (otherwise, return true) return invalidSchemaError; } }
public BigQueryException(int code, String message, BigQueryError error) { super(code, message, error != null ? error.getReason() : null, true, RETRYABLE_ERRORS); this.error = error; }
// Fragment of a test method (header outside this view): asserts no errors for row 1
// and a single error with the expected message for row 0.
assertNull(response.getErrorsFor(1L)); assertEquals(1, response.getErrorsFor(0L).size()); assertEquals("ErrorMessage", response.getErrorsFor(0L).get(0).getMessage());
/**
 * Validates a completed copy job: logs success, or throws describing the failure.
 *
 * @param job the finished copy job; {@code null} if the job has vanished
 * @param tableId the table that was being published (for log context)
 * @throws RuntimeException if the job is missing or finished with an error
 */
private static void throwIfUnsuccessfulJobStatus(Job job, TableId tableId) {
  // Happy path first: job exists and reports no error.
  if (job != null && job.getStatus().getError() == null) {
    LOG.info("successfully published table {}", tableId);
    return;
  }
  // Failure: either the job disappeared or it carries an error to report.
  String error = (job == null) ? "job no longer exists" : job.getStatus().getError().toString();
  LOG.error("Could not copy BigQuery table {} from staging to target with error: {}", tableId, error);
  throw new RuntimeException(error);
}
}
/**
 * Renders per-row insertAll errors as a single multi-line message, one line per
 * error of the form "[row index N]: reason: message".
 *
 * @param errorsMap map from row index to the errors reported for that row
 * @return the formatted summary, starting with a fixed header line
 */
private static String formatInsertAllErrors(Map<Long, List<BigQueryError>> errorsMap) {
  StringBuilder sb = new StringBuilder("table insertion failed for the following rows:");
  for (Map.Entry<Long, List<BigQueryError>> entry : errorsMap.entrySet()) {
    Long rowIndex = entry.getKey();
    for (BigQueryError rowError : entry.getValue()) {
      // %n keeps the line separator platform-appropriate.
      sb.append(
          String.format(
              "%n\t[row index %d]: %s: %s", rowIndex, rowError.getReason(), rowError.getMessage()));
    }
  }
  return sb.toString();
}
}
public BigQueryException(IOException exception) { super(exception, true, RETRYABLE_ERRORS); BigQueryError error = null; if (getReason() != null) { error = new BigQueryError(getReason(), getLocation(), getMessage(), getDebugInfo()); } this.error = error; }
/**
 * Serializes this job status to its API protobuf form, copying only the
 * fields that are set (state, terminal error, and execution error list).
 *
 * @return the populated {@code JobStatus} proto
 */
com.google.api.services.bigquery.model.JobStatus toPb() {
  com.google.api.services.bigquery.model.JobStatus pb =
      new com.google.api.services.bigquery.model.JobStatus();
  if (state != null) {
    pb.setState(state.toString());
  }
  if (error != null) {
    pb.setErrorResult(error.toPb());
  }
  if (executionErrors != null) {
    // Lazy Guava view: each BigQueryError is converted on access.
    pb.setErrors(Lists.transform(executionErrors, BigQueryError.TO_PB_FUNCTION));
  }
  return pb;
}
/**
 * Deserializes a {@code JobStatus} proto into its client-side representation.
 *
 * <p>NOTE(review): {@code State.valueOf(statusPb.getState())} will throw if the
 * proto's state is null or an unrecognized value — presumably the service always
 * sets a known state; confirm before relying on this with partial responses.
 *
 * @param statusPb the proto received from the service
 * @return the corresponding {@code JobStatus}
 */
static JobStatus fromPb(com.google.api.services.bigquery.model.JobStatus statusPb) {
  // Lazily convert the execution-error list, if present.
  List<BigQueryError> allErrors =
      (statusPb.getErrors() == null)
          ? null
          : Lists.transform(statusPb.getErrors(), BigQueryError.FROM_PB_FUNCTION);
  // Convert the terminal error, if present.
  BigQueryError error = null;
  if (statusPb.getErrorResult() != null) {
    error = BigQueryError.fromPb(statusPb.getErrorResult());
  }
  return new JobStatus(State.valueOf(statusPb.getState()), error, allErrors);
}
}