/**
 * Verifies that a {@link SyntheticBoundedSource} built from the shared test options reports the
 * expected estimated size, and that an unstarted reader yields exactly the records of its parent
 * source.
 *
 * @param splitPointFrequency split-point frequency (in records) applied to the test source options
 */
private void testSourceAndReadersWorkP(long splitPointFrequency) throws Exception {
  PipelineOptions pipelineOptions = PipelineOptionsFactory.create();
  // Configure the shared options before constructing the source so the source sees them.
  testSourceOptions.splitPointFrequencyRecords = splitPointFrequency;
  SyntheticBoundedSource syntheticSource = new SyntheticBoundedSource(testSourceOptions);
  // 10 * (10 + 20) — presumably 10 records of 10-byte keys and 20-byte values; TODO confirm
  // against the testSourceOptions fixture.
  assertEquals(10 * (10 + 20), syntheticSource.getEstimatedSizeBytes(pipelineOptions));
  SourceTestUtils.assertUnstartedReaderReadsSameAsItsSource(
      syntheticSource.createReader(pipelineOptions), pipelineOptions);
}
/** This test validates that reader and its parent source reads the same records. */ @Test public void testReaderAndParentSourceReadsSameData() throws Exception { InputSplit mockInputSplit = Mockito.mock(NewObjectsEmployeeInputSplit.class); HadoopInputFormatBoundedSource<Text, Employee> boundedSource = new HadoopInputFormatBoundedSource<>( serConf, WritableCoder.of(Text.class), AvroCoder.of(Employee.class), null, // No key translation required. null, // No value translation required. new SerializableSplit(mockInputSplit)); BoundedReader<KV<Text, Employee>> reader = boundedSource.createReader(p.getOptions()); SourceTestUtils.assertUnstartedReaderReadsSameAsItsSource(reader, p.getOptions()); }
/**
 * This test validates that reader and its parent source reads the same records.
 *
 * <p>NOTE(review): this method is byte-for-byte identical to the preceding
 * testReaderAndParentSourceReadsSameData. If both live in the same test class this is a duplicate
 * method definition and will not compile — likely a copy/paste artifact; confirm and remove one.
 */
@Test
public void testReaderAndParentSourceReadsSameData() throws Exception {
  // Mocked split: the assertion only compares reader output with source output.
  InputSplit mockInputSplit = Mockito.mock(NewObjectsEmployeeInputSplit.class);
  HadoopInputFormatBoundedSource<Text, Employee> boundedSource =
      new HadoopInputFormatBoundedSource<>(
          serConf,
          WritableCoder.of(Text.class),
          AvroCoder.of(Employee.class),
          null, // No key translation required.
          null, // No value translation required.
          new SerializableSplit(mockInputSplit));
  BoundedReader<KV<Text, Employee>> reader = boundedSource.createReader(p.getOptions());
  SourceTestUtils.assertUnstartedReaderReadsSameAsItsSource(reader, p.getOptions());
}