/** * Performs a semi-deep copy/clone but does not clone the rows from the Result * * @return An almost-clone of the Result, minus the rows */ public Result lightClone() { // This light-weight clone doesn't clone rows try { Result result = (Result) super.clone(); result.setRows( null ); if ( resultFiles != null ) { Map<String, ResultFile> clonedFiles = new ConcurrentHashMap<String, ResultFile>(); Collection<ResultFile> files = resultFiles.values(); for ( ResultFile file : files ) { clonedFiles.put( file.getFile().toString(), file.clone() ); } result.setResultFiles( clonedFiles ); } return result; } catch ( CloneNotSupportedException e ) { return null; } }
clonedRows.add( ( rows.get( i ) ).clone() ); result.setRows( clonedRows );
/**
 * Runs one batch of buffered rows through a freshly created sub-transformation
 * and returns its Result, with the rows written by the configured sub-step
 * substituted in as the result rows.
 *
 * @param rows the buffered input rows for this execution; the first row also
 *             supplies the parameter values passed to the sub-transformation
 * @return the sub-transformation's Result carrying the captured output rows,
 *         or {@code Optional.empty()} when there is no input or this executor
 *         has been stopped
 * @throws KettleException if preparing the sub-transformation fails
 */
public Optional<Result> execute( List<RowMetaAndData> rows ) throws KettleException {
  if ( rows.isEmpty() || stopped ) {
    return Optional.empty();
  }
  Trans subtrans = this.createSubtrans();
  // Track the live sub-trans so it can be monitored/stopped from outside.
  running.add( subtrans );
  parentTrans.addActiveSubTransformation( subTransName, subtrans );
  // Pass parameter values
  // Parameters come from the first row only — presumably every row in the
  // batch carries the same parameter values; TODO confirm with callers.
  passParametersToTrans( subtrans, rows.get( 0 ) );
  Result result = new Result();
  result.setRows( rows );
  subtrans.setPreviousResult( result );
  subtrans.prepareExecution( this.parentTrans.getArguments() );
  // Collect every row the named sub-step writes so it can be handed back
  // to the parent transformation after the sub-trans finishes.
  List<RowMetaAndData> rowMetaAndData = new ArrayList<>();
  subtrans.getSteps().stream()
    .filter( c -> c.step.getStepname().equalsIgnoreCase( subStep ) )
    .findFirst()
    .ifPresent( c -> c.step.addRowListener( new RowAdapter() {
      @Override
      public void rowWrittenEvent( RowMetaInterface rowMeta, Object[] row ) {
        rowMetaAndData.add( new RowMetaAndData( rowMeta, row ) );
      }
    } ) );
  subtrans.startThreads();
  // Blocks until the sub-transformation completes.
  subtrans.waitUntilFinished();
  updateStatuses( subtrans );
  // NOTE(review): if prepareExecution/startThreads throws, subtrans is never
  // removed from "running" — a try/finally may be warranted; verify intent.
  running.remove( subtrans );
  Result subtransResult = subtrans.getResult();
  // Replace the sub-trans result rows with the rows captured from subStep.
  subtransResult.setRows( rowMetaAndData );
  return Optional.of( subtransResult );
}
@Test
public void filesPath_AreProcessed_ArgsOfPreviousMeta() throws Exception {
  jobEntry.setArgFromPrevious( true );

  // Previous-result rows carry one valid file path.
  List<RowMetaAndData> previousRows = new ArrayList<>();
  previousRows.add( constructRowMetaAndData( PATH_TO_FILE, null ) );
  Result previousResult = new Result();
  previousResult.setRows( previousRows );

  jobEntry.execute( previousResult, 0 );

  // Exactly one processFile invocation per incoming row is expected.
  verify( jobEntry, times( previousRows.size() ) ).processFile( anyString(), anyString(), any( Job.class ) );
}
@Test
public void filesWithNoPath_AreNotProcessed_ArgsOfPreviousMeta() throws Exception {
  jobEntry.setArgFromPrevious( true );

  // Previous-result rows contain only blank paths (empty and whitespace-only).
  List<RowMetaAndData> previousRows = new ArrayList<>();
  previousRows.add( constructRowMetaAndData( Const.EMPTY_STRING, null ) );
  previousRows.add( constructRowMetaAndData( STRING_SPACES_ONLY, null ) );
  Result previousResult = new Result();
  previousResult.setRows( previousRows );

  jobEntry.execute( previousResult, 0 );

  // Blank paths must never trigger file processing.
  verify( jobEntry, never() ).processFile( anyString(), anyString(), any( Job.class ) );
}
@Test
public void testStop() throws KettleException {
  // A buffered result that is neither safe-stopped nor carrying any rows.
  Result emptyResult = new Result();
  emptyResult.setSafeStop( false );
  emptyResult.setRows( Collections.emptyList() );
  when( streamWindow.buffer( any() ) ).thenReturn( Collections.singletonList( emptyResult ) );

  baseStreamStep.processRow( meta, stepData );

  // The step must not report a safe stop, and the source must be closed.
  assertFalse( baseStreamStep.isSafeStopped() );
  verify( streamSource ).close();
}
@Test
public void supportsPostProcessing() throws KettleException {
  RowMetaInterface rowMeta = new RowMeta();
  rowMeta.addValueMeta( new ValueMetaString( "field" ) );

  Result mockResult = new Result();
  mockResult.setRows( Arrays.asList(
    new RowMetaAndData( rowMeta, "queen" ),
    new RowMetaAndData( rowMeta, "king" ) ) );
  when( subtransExecutor.execute( any() ) ).thenReturn( Optional.of( mockResult ) );

  // The post-processing callback records the size of the emitted batch.
  AtomicInteger count = new AtomicInteger();
  FixedTimeStreamWindow<List> window =
    new FixedTimeStreamWindow<>( subtransExecutor, rowMeta, 0, 2, 1,
      p -> count.set( p.getKey().get( 0 ).size() ) );

  window.buffer( Flowable.fromIterable( singletonList( asList( "v1", "v2" ) ) ) )
    .forEach( result -> assertEquals( mockResult, result ) );

  // Both buffered values must have reached the post-processor.
  assertEquals( 2, count.get() );
}
}
@Test
public void resultsComeBackToParent() throws KettleException {
  RowMetaInterface rowMeta = new RowMeta();
  rowMeta.addValueMeta( new ValueMetaString( "field" ) );

  // The sub-trans executor hands back a fixed two-row result.
  Result mockResult = new Result();
  mockResult.setRows( Arrays.asList(
    new RowMetaAndData( rowMeta, "queen" ),
    new RowMetaAndData( rowMeta, "king" ) ) );
  when( subtransExecutor.execute( any() ) ).thenReturn( Optional.of( mockResult ) );

  FixedTimeStreamWindow<List> window = new FixedTimeStreamWindow<>( subtransExecutor, rowMeta, 0, 2, 1 );

  // Every buffered emission must surface the executor's result to the parent.
  window.buffer( Flowable.fromIterable( singletonList( asList( "v1", "v2" ) ) ) )
    .forEach( result -> assertEquals( mockResult, result ) );
}
@Test public void testGetXML() throws KettleException { SlaveServerTransStatus transStatus = new SlaveServerTransStatus(); RowMetaAndData rowMetaAndData = new RowMetaAndData(); String testData = "testData"; rowMetaAndData.addValue( new ValueMetaString(), testData ); List<RowMetaAndData> rows = new ArrayList<>(); rows.add( rowMetaAndData ); Result result = new Result(); result.setRows( rows ); transStatus.setResult( result ); //PDI-15781 Assert.assertFalse( transStatus.getXML().contains( testData ) ); //PDI-17061 Assert.assertTrue( transStatus.getXML( true ).contains( testData ) ); } }
@Test
public void testHTTPResultDefaultRows() throws IOException {
  File uploadFile = getInputFile( "existingFile1", ".tmp" );
  File downloadFile = File.createTempFile( "downloadedFile1", ".tmp" );
  uploadFile.deleteOnExit();
  downloadFile.deleteOnExit();

  // Default field names: URL / UPLOAD / DESTINATION.
  RowMeta defaultRowMeta = new RowMeta();
  defaultRowMeta.addValueMeta( new ValueMetaString( "URL" ) );
  defaultRowMeta.addValueMeta( new ValueMetaString( "UPLOAD" ) );
  defaultRowMeta.addValueMeta( new ValueMetaString( "DESTINATION" ) );
  Object[] rowData = new Object[] {
    HTTP_SERVER_BASEURL + "/uploadFile",
    uploadFile.getCanonicalPath(),
    downloadFile.getCanonicalPath() };
  List<RowMetaAndData> rows = new ArrayList<RowMetaAndData>();
  rows.add( new RowMetaAndData( defaultRowMeta, rowData ) );
  Result previousResult = new Result();
  previousResult.setRows( rows );

  JobEntryHTTP http = new JobEntryHTTP();
  http.setParentJob( new Job() );
  http.setRunForEveryRow( true );
  http.setAddFilenameToResult( false );
  http.execute( previousResult, 0 );

  // Round-trip check: the downloaded file must match the uploaded one.
  assertTrue( FileUtils.contentEquals( uploadFile, downloadFile ) );
}
result.setRows( resultRows ); if ( !Utils.isEmpty( resultFiles ) ) { result.setResultFiles( new HashMap<String, ResultFile>() );
@Test
public void testHTTPResultCustomRows() throws IOException {
  File uploadFile = getInputFile( "existingFile2", ".tmp" );
  File downloadFile = File.createTempFile( "downloadedFile2", ".tmp" );
  uploadFile.deleteOnExit();
  downloadFile.deleteOnExit();

  // Custom field names, mapped explicitly onto the job entry below.
  RowMeta customRowMeta = new RowMeta();
  customRowMeta.addValueMeta( new ValueMetaString( "MyURL" ) );
  customRowMeta.addValueMeta( new ValueMetaString( "MyUpload" ) );
  customRowMeta.addValueMeta( new ValueMetaString( "MyDestination" ) );
  Object[] rowData = new Object[] {
    HTTP_SERVER_BASEURL + "/uploadFile",
    uploadFile.getCanonicalPath(),
    downloadFile.getCanonicalPath() };
  List<RowMetaAndData> rows = new ArrayList<RowMetaAndData>();
  rows.add( new RowMetaAndData( customRowMeta, rowData ) );
  Result previousResult = new Result();
  previousResult.setRows( rows );

  JobEntryHTTP http = new JobEntryHTTP();
  http.setParentJob( new Job() );
  http.setRunForEveryRow( true );
  http.setAddFilenameToResult( false );
  http.setUrlFieldname( "MyURL" );
  http.setUploadFieldname( "MyUpload" );
  http.setDestinationFieldname( "MyDestination" );
  http.execute( previousResult, 0 );

  // Round-trip check: the downloaded file must match the uploaded one.
  assertTrue( FileUtils.contentEquals( uploadFile, downloadFile ) );
}
result.setRows( transExecutorData.groupBuffer ); executorTrans.setPreviousResult( result );
result.setRows( getSourceRows() );