@Override public void evaluate() throws Throwable { evaluateRan[0] = true; expectedLogs.verifyNotLogged(messageUnexpected); LOG.info(messageExpected); expectedLogs.verifyInfo(messageExpected); } },
/**
 * Verifies that {@link DataflowPipelineJob} treats the {@link State#FAILED FAILED} state as
 * terminal and logs the failure.
 */
@Test
public void testWaitToFinishFailed() throws Exception {
  State terminalState = mockWaitToFinishInState(State.FAILED);
  assertEquals(State.FAILED, terminalState);
  expectedLogs.verifyInfo(String.format("Job %s failed with status FAILED.", JOB_ID));
}
/**
 * Verifies that {@link DataflowPipelineJob} treats the {@link State#CANCELLED CANCELLED} state
 * as terminal and logs the cancellation.
 */
@Test
public void testWaitToFinishCancelled() throws Exception {
  State terminalState = mockWaitToFinishInState(State.CANCELLED);
  assertEquals(State.CANCELLED, terminalState);
  expectedLogs.verifyInfo(String.format("Job %s finished with status CANCELLED", JOB_ID));
}
/**
 * Verifies that {@link DataflowPipelineJob} treats the {@link State#UPDATED UPDATED} state as
 * terminal and logs the id of the replacement job.
 */
@Test
public void testWaitToFinishUpdated() throws Exception {
  State terminalState = mockWaitToFinishInState(State.UPDATED);
  assertEquals(State.UPDATED, terminalState);
  String updateMessage =
      String.format(
          "Job %s has been updated and is running as the new job with id %s.",
          JOB_ID, REPLACEMENT_JOB_ID);
  expectedLogs.verifyInfo(updateMessage);
}
/**
 * Verifies that {@link DataflowPipelineJob} treats the {@link State#DONE DONE} state as terminal
 * and logs successful completion.
 */
@Test
public void testWaitToFinishDone() throws Exception {
  State terminalState = mockWaitToFinishInState(State.DONE);
  assertEquals(State.DONE, terminalState);
  expectedLogs.verifyInfo(String.format("Job %s finished with status DONE.", JOB_ID));
}
/** Verifies that a read over an empty table succeeds and yields zero records. */
@Test
public void testReadingEmptyTable() throws Exception {
  final String emptyTableId = "TEST-EMPTY-TABLE";

  service.createTable(emptyTableId);
  service.setupSampleRowKeys(emptyTableId, 1, 1L);

  // An empty table should produce an empty result set.
  runReadTest(defaultRead.withTableId(emptyTableId), new ArrayList<>());
  logged.verifyInfo("Closing reader after reading 0 records.");
}
/** Verifies that every row of a populated table is read back. */
@Test
public void testReading() throws Exception {
  final String tableId = "TEST-MANY-ROWS-TABLE";
  final int rowCount = 1001;

  List<Row> expectedRows = makeTableData(tableId, rowCount);
  service.setupSampleRowKeys(tableId, 3, 1000L);

  runReadTest(defaultRead.withTableId(tableId), expectedRows);
  // With three key-range splits, each reader handles roughly a third of the rows.
  logged.verifyInfo(String.format("Closing reader after reading %d records.", rowCount / 3));
}
/** Verifies that {@link BigQueryServicesImpl.JobServiceImpl#startLoadJob} succeeds. */
@Test
public void testStartLoadJobSucceeds() throws IOException, InterruptedException {
  JobReference reference = new JobReference();
  reference.setJobId("jobId");
  reference.setProjectId("projectId");
  Job loadJob = new Job();
  loadJob.setJobReference(reference);

  // Stub a single successful HTTP response carrying the job payload.
  when(response.getContentType()).thenReturn(Json.MEDIA_TYPE);
  when(response.getStatusCode()).thenReturn(200);
  when(response.getContent()).thenReturn(toStream(loadJob));

  Sleeper fastSleeper = new FastNanoClockAndSleeper();
  JobServiceImpl.startJob(
      loadJob,
      new ApiErrorExtractor(),
      bigquery,
      fastSleeper,
      BackOffAdapter.toGcpBackOff(FluentBackoff.DEFAULT.backoff()));

  // Exactly one request should have been issued.
  verify(response, times(1)).getStatusCode();
  verify(response, times(1)).getContent();
  verify(response, times(1)).getContentType();
  expectedLogs.verifyInfo(String.format("Started BigQuery job: %s", reference));
}
/** Tests that {@link BigQueryServicesImpl} retries quota rate limited attempts. */ @Test public void testCreateTableRetry() throws IOException { TableReference ref = new TableReference().setProjectId("project").setDatasetId("dataset").setTableId("table"); Table testTable = new Table().setTableReference(ref); // First response is 403 rate limited, second response has valid payload. when(response.getContentType()).thenReturn(Json.MEDIA_TYPE); when(response.getStatusCode()).thenReturn(403).thenReturn(200); when(response.getContent()) .thenReturn(toStream(errorWithReasonAndStatus("rateLimitExceeded", 403))) .thenReturn(toStream(testTable)); BigQueryServicesImpl.DatasetServiceImpl services = new BigQueryServicesImpl.DatasetServiceImpl(bigquery, PipelineOptionsFactory.create()); Table ret = services.tryCreateTable( testTable, new RetryBoundedBackOff(3, BackOff.ZERO_BACKOFF), Sleeper.DEFAULT); assertEquals(testTable, ret); verify(response, times(2)).getStatusCode(); verify(response, times(2)).getContent(); verify(response, times(2)).getContentType(); verifyNotNull(ret.getTableReference()); expectedLogs.verifyInfo( "Quota limit reached when creating table project:dataset.table, " + "retrying up to 5.0 minutes"); }
verify(response, times(2)).getContent(); verify(response, times(2)).getContentType(); expectedLogs.verifyInfo("BigQuery insertAll exceeded rate limit, retrying");
verify(response, times(4)).getContent(); verify(response, times(4)).getContentType(); expectedLogs.verifyInfo("Retrying 1 failed inserts to BigQuery");
/**
 * Verifies that the {@link DataflowRunner} with {@code --templateLocation} returns normally when
 * the template is created successfully.
 */
@Test
public void testTemplateRunnerFullCompletion() throws Exception {
  File templateFile = tmpFolder.newFile();

  DataflowPipelineOptions options = PipelineOptionsFactory.as(DataflowPipelineOptions.class);
  options.setJobName("TestJobName");
  options.setGcpCredential(new TestCredential());
  options.setPathValidatorClass(NoopPathValidator.class);
  options.setProject("test-project");
  options.setRunner(DataflowRunner.class);
  options.setTemplateLocation(templateFile.getPath());
  options.setTempLocation(tmpFolder.getRoot().getPath());

  Pipeline pipeline = Pipeline.create(options);
  pipeline.run();

  expectedLogs.verifyInfo("Template successfully created");
}
false); assertEquals(1, failedInserts.size()); expectedLogs.verifyInfo("Retrying 1 failed inserts to BigQuery");
expectedLogs.verifyWarn("WARNINGWARNING"); expectedLogs.verifyWarn(warningTime.toString()); expectedLogs.verifyInfo("BASICBASIC"); expectedLogs.verifyInfo(basicTime.toString()); expectedLogs.verifyInfo("DETAILEDDETAILED"); expectedLogs.verifyInfo(detailedTime.toString()); expectedLogs.verifyDebug("DEBUGDEBUG"); expectedLogs.verifyDebug(debugTime.toString());