mutations.apply(SpannerIO.write() .withInstanceId(instanceId) .withDatabaseId(databaseId)
/**
 * Expanding a completely unconfigured write must fail fast with an NPE whose
 * message points the user at the missing instance id.
 */
@Test
public void emptyTransform() throws Exception {
  SpannerIO.Write unconfigured = SpannerIO.write();
  thrown.expect(NullPointerException.class);
  thrown.expectMessage("requires instance id to be set with");
  unconfigured.expand(null);
}
/** A write with only an instance id must fail expansion, naming the missing database id. */
@Test
public void emptyDatabaseId() throws Exception {
  SpannerIO.Write instanceOnly = SpannerIO.write().withInstanceId("123");
  thrown.expect(NullPointerException.class);
  thrown.expectMessage("requires database id to be set with");
  instanceOnly.expand(null);
}
/** A write with only a database id must fail expansion, naming the missing instance id. */
@Test
public void emptyInstanceId() throws Exception {
  SpannerIO.Write databaseOnly = SpannerIO.write().withDatabaseId("123");
  thrown.expect(NullPointerException.class);
  thrown.expectMessage("requires instance id to be set with");
  databaseOnly.expand(null);
}
.apply(Create.of(mutationGroupList)) .apply( SpannerIO.write() .withProjectId("test-project") .withInstanceId("test-instance")
/**
 * End-to-end write: generates {@code recordCount} mutations, writes them to the
 * test database, and verifies the pipeline succeeds with every row persisted.
 */
@Test
public void testWrite() throws Exception {
  final int recordCount = 100;
  SpannerIO.Write writeTransform =
      SpannerIO.write()
          .withProjectId(project)
          .withInstanceId(options.getInstanceId())
          .withDatabaseId(databaseName);
  p.apply(GenerateSequence.from(0).to(recordCount))
      .apply(ParDo.of(new GenerateMutations(options.getTable())))
      .apply(writeTransform);
  PipelineResult result = p.run();
  result.waitUntilFinish();
  // The pipeline must complete successfully and every generated row must be present.
  assertThat(result.getState(), is(PipelineResult.State.DONE));
  assertThat(countNumberOfRecords(), equalTo((long) recordCount));
}
/** The transform must expose exactly the four configured values through display data. */
@Test
public void displayData() throws Exception {
  SpannerIO.Write write =
      SpannerIO.write()
          .withProjectId("test-project")
          .withInstanceId("test-instance")
          .withDatabaseId("test-database")
          .withBatchSizeBytes(123);
  DisplayData displayData = DisplayData.from(write);
  // One display item per configured property, no extras.
  assertThat(displayData.items(), hasSize(4));
  assertThat(displayData, hasDisplayItem("projectId", "test-project"));
  assertThat(displayData, hasDisplayItem("instanceId", "test-instance"));
  assertThat(displayData, hasDisplayItem("databaseId", "test-database"));
  assertThat(displayData, hasDisplayItem("batchSizeBytes", 123));
}
/**
 * With {@code REPORT_FAILURES} the pipeline finishes successfully even when some
 * mutations are rejected: of the {@code 2 * recordCount} inputs (filtered through
 * DivBy2 — presumably making half invalid; confirm against GenerateMutations),
 * exactly {@code recordCount} rows end up written.
 */
@Test
public void testReportFailures() throws Exception {
  final int recordCount = 100;
  SpannerIO.Write reportingWrite =
      SpannerIO.write()
          .withProjectId(project)
          .withInstanceId(options.getInstanceId())
          .withDatabaseId(databaseName)
          .withFailureMode(SpannerIO.FailureMode.REPORT_FAILURES);
  p.apply(GenerateSequence.from(0).to(2 * recordCount))
      .apply(ParDo.of(new GenerateMutations(options.getTable(), new DivBy2())))
      .apply(reportingWrite);
  PipelineResult result = p.run();
  result.waitUntilFinish();
  assertThat(result.getState(), is(PipelineResult.State.DONE));
  assertThat(countNumberOfRecords(), equalTo((long) recordCount));
}
/** A single mutation written through the stubbed service must arrive as one batch. */
@Test
@Category(NeedsRunner.class)
public void singleMutationPipeline() throws Exception {
  Mutation single = m(2L);
  PCollection<Mutation> mutations = pipeline.apply(Create.of(single));
  mutations.apply(
      SpannerIO.write()
          .withProjectId("test-project")
          .withInstanceId("test-instance")
          .withDatabaseId("test-database")
          .withServiceFactory(serviceFactory));
  pipeline.run();
  verifyBatches(batch(m(2L)));
}
/**
 * Two writes sequenced with {@code Wait.on}: the second step may only start writing
 * once the first write's output signals completion. Both halves of the generated
 * sequence must be persisted.
 */
@Test
public void testSequentialWrite() throws Exception {
  final int recordCount = 100;
  SpannerWriteResult firstWrite =
      p.apply("first step", GenerateSequence.from(0).to(recordCount))
          .apply(ParDo.of(new GenerateMutations(options.getTable())))
          .apply(
              SpannerIO.write()
                  .withProjectId(project)
                  .withInstanceId(options.getInstanceId())
                  .withDatabaseId(databaseName));
  p.apply("second step", GenerateSequence.from(recordCount).to(2 * recordCount))
      .apply("Gen mutations", ParDo.of(new GenerateMutations(options.getTable())))
      // Gate the second write on the first write's completion signal.
      .apply(Wait.on(firstWrite.getOutput()))
      .apply(
          "write to table2",
          SpannerIO.write()
              .withProjectId(project)
              .withInstanceId(options.getInstanceId())
              .withDatabaseId(databaseName));
  PipelineResult result = p.run();
  result.waitUntilFinish();
  assertThat(result.getState(), is(PipelineResult.State.DONE));
  assertThat(countNumberOfRecords(), equalTo(2L * recordCount));
}
/** A single MutationGroup written via {@code grouped()} yields one batch holding all its mutations. */
@Test
@Category(NeedsRunner.class)
public void singleMutationGroupPipeline() throws Exception {
  MutationGroup group = g(m(1L), m(2L), m(3L));
  PCollection<MutationGroup> mutations = pipeline.apply(Create.<MutationGroup>of(group));
  mutations.apply(
      SpannerIO.write()
          .withProjectId("test-project")
          .withInstanceId("test-instance")
          .withDatabaseId("test-database")
          .withServiceFactory(serviceFactory)
          .grouped());
  pipeline.run();
  verifyBatches(batch(m(1L), m(2L), m(3L)));
}
/**
 * With a one-byte batch size limit nothing can be coalesced, so each mutation
 * group must be flushed as its own batch.
 */
@Test
@Category(NeedsRunner.class)
public void noBatching() throws Exception {
  PCollection<MutationGroup> groups = pipeline.apply(Create.of(g(m(1L)), g(m(2L))));
  groups.apply(
      SpannerIO.write()
          .withProjectId("test-project")
          .withInstanceId("test-instance")
          .withDatabaseId("test-database")
          .withServiceFactory(serviceFactory)
          .withBatchSizeBytes(1) // too small to ever hold two groups
          .grouped());
  pipeline.run();
  verifyBatches(batch(m(1L)), batch(m(2L)));
}
/**
 * Default failure mode: a rejected mutation must abort the pipeline with a
 * SpannerException mentioning the NULL-value violation. (DivBy2 presumably makes
 * some generated mutations invalid — verify against GenerateMutations.)
 */
@Test
public void testFailFast() throws Exception {
  thrown.expect(new StackTraceContainsString("SpannerException"));
  thrown.expect(new StackTraceContainsString("Value must not be NULL in table users"));
  final int recordCount = 100;
  p.apply(GenerateSequence.from(0).to(2 * recordCount))
      .apply(ParDo.of(new GenerateMutations(options.getTable(), new DivBy2())))
      .apply(
          SpannerIO.write()
              .withProjectId(project)
              .withInstanceId(options.getInstanceId())
              .withDatabaseId(databaseName));
  PipelineResult result = p.run();
  result.waitUntilFinish();
}
/**
 * Streaming input: three processing-time panes of two mutations each must be
 * flushed as three separate batches.
 */
@Test
@Category({NeedsRunner.class, UsesTestStream.class})
public void streamingWrites() throws Exception {
  TestStream<Mutation> input =
      TestStream.create(SerializableCoder.of(Mutation.class))
          .addElements(m(1L), m(2L))
          .advanceProcessingTime(Duration.standardMinutes(1))
          .addElements(m(3L), m(4L))
          .advanceProcessingTime(Duration.standardMinutes(1))
          .addElements(m(5L), m(6L))
          .advanceWatermarkToInfinity();
  pipeline
      .apply(input)
      .apply(
          SpannerIO.write()
              .withProjectId("test-project")
              .withInstanceId("test-instance")
              .withDatabaseId("test-database")
              .withServiceFactory(serviceFactory));
  pipeline.run();
  verifyBatches(batch(m(1L), m(2L)), batch(m(3L), m(4L)), batch(m(5L), m(6L)));
}
.apply("Wait for previous depth " + depth, Wait.on(previousComputation)) .apply( "Write mutations " + depth, SpannerIO.write().withSpannerConfig(spannerConfig)); previousComputation = result.getOutput();