/**
 * Refreshes the cached {@link StepStatus} for every step of the given
 * sub-transformation, creating an entry on first sight, and marks each
 * step as running.
 *
 * @param subtrans the (sub-)transformation whose step statuses to update
 */
private synchronized void updateStatuses( Trans subtrans ) {
  List<StepMetaDataCombi> steps = subtrans.getSteps();
  for ( StepMetaDataCombi combi : steps ) {
    // Single map lookup instead of containsKey() followed by get().
    StepStatus stepStatus = statuses.get( combi.stepname );
    if ( stepStatus != null ) {
      stepStatus.updateAll( combi.step );
    } else {
      stepStatus = new StepStatus( combi.step );
      statuses.put( combi.stepname, stepStatus );
    }
    stepStatus.setStatusDescription( StepExecutionStatus.STATUS_RUNNING.getDescription() );
  }
}
/**
 * If at least one step of the transformation writes its data straight to the
 * servlet output, we must wait for the transformation to terminate. Otherwise
 * the servlet's response lifecycle may end and the container may close the
 * response while the transformation is still trying to write data into it.
 */
@VisibleForTesting
void finishProcessing( Trans trans, PrintWriter out ) {
  boolean writesToServletOutput =
    trans.getSteps().stream().anyMatch( step -> step.meta.passDataToServletOutput() );
  if ( writesToServletOutput ) {
    trans.waitUntilFinished();
  } else {
    // No servlet-bound step: report success right away and let the trans run on.
    WebResult webResult = new WebResult( WebResult.STRING_OK, "Transformation started", trans.getContainerObjectId() );
    out.println( webResult.getXML() );
    out.flush();
  }
}
private StepInterface getStepByName( String name ) { List<StepMetaDataCombi> combiList = trans.getSteps(); for ( StepMetaDataCombi item : combiList ) { if ( item.step.toString().equals( name ) ) { return item.step; } } fail( "Test error, can't find step with name: " + name ); // and this will never happens. return null; }
public void dispose() throws KettleException { // Call output done. // for ( StepMetaDataCombi combi : trans.getSteps() ) { combi.step.setOutputDone(); } // Finalize all the steps... // for ( StepMetaDataCombi combi : steps ) { combi.step.dispose( combi.meta, combi.data ); combi.step.markStop(); } }
public SingleThreadedTransExecutor( final Trans trans ) { this.trans = trans; this.log = trans.getLogChannel(); steps = trans.getSteps(); // Always disable thread priority management, it will always slow us down... // for ( StepMetaDataCombi combi : steps ) { combi.step.setUsingThreadPriorityManagment( false ); } sortSteps(); done = new boolean[steps.size()]; nrDone = 0; stepInfoStreams = new ArrayList<List<StreamInterface>>(); stepInfoRowSets = new ArrayList<List<RowSet>>(); for ( StepMetaDataCombi combi : steps ) { List<StreamInterface> infoStreams = combi.stepMeta.getStepMetaInterface().getStepIOMeta().getInfoStreams(); stepInfoStreams.add( infoStreams ); List<RowSet> infoRowSets = new ArrayList<RowSet>(); for ( StreamInterface infoStream : infoStreams ) { RowSet infoRowSet = trans.findRowSet( infoStream.getStepname(), 0, combi.stepname, 0 ); if ( infoRowSet != null ) { infoRowSets.add( infoRowSet ); } } stepInfoRowSets.add( infoRowSets ); } }
// Runs when the transformation finishes: for every step that reported errors,
// pull that step's log lines from the central log buffer, keep only the
// error-level messages, and store the concatenated text in stepLogMap keyed
// by the step's metadata.
@Override public void run() {
  for ( StepMetaDataCombi combi : trans.getSteps() ) {
    if ( combi.step.getErrors() > 0 ) {
      String channelId = combi.step.getLogChannel().getLogChannelId();
      // Fetch the step's entire buffered log (line 0 through the last buffered line).
      List<KettleLoggingEvent> eventList =
        KettleLogStore.getLogBufferFromTo( channelId, false, 0, KettleLogStore.getLastBufferLineNr() );
      StringBuilder logText = new StringBuilder();
      for ( KettleLoggingEvent event : eventList ) {
        Object message = event.getMessage();
        if ( message instanceof LogMessage ) {
          LogMessage logMessage = (LogMessage) message;
          if ( logMessage.isError() ) {
            // Keep only error lines, one per row.
            logText.append( logMessage.getMessage() ).append( Const.CR );
          }
        }
      }
      stepLogMap.put( combi.stepMeta, logText.toString() );
    }
  }
} } );
/**
 * Delegates to the designated "lines output" step of the mapping
 * transformation, or returns 0 when no step was designated.
 */
@Override
public long getLinesOutput() {
  if ( getData() == null || getData().linesOutputStepNr == -1 ) {
    return 0;
  }
  return getData().getMappingTrans().getSteps().get( getData().linesOutputStepNr ).step.getLinesOutput();
}
/**
 * Delegates to the designated "lines written" step of the mapping
 * transformation, or returns 0 when no step was designated.
 */
@Override
public long getLinesWritten() {
  if ( getData() == null || getData().linesWrittenStepNr == -1 ) {
    return 0;
  }
  return getData().getMappingTrans().getSteps().get( getData().linesWrittenStepNr ).step.getLinesWritten();
}
/**
 * Delegates to the designated "lines input" step of the mapping
 * transformation, or returns 0 when no step was designated.
 */
@Override
public long getLinesInput() {
  if ( getData() == null || getData().linesInputStepNr == -1 ) {
    return 0;
  }
  return getData().getMappingTrans().getSteps().get( getData().linesInputStepNr ).step.getLinesInput();
}
/**
 * Delegates to the designated "lines read" step of the mapping
 * transformation, or returns 0 when no step was designated.
 */
@Override
public long getLinesRead() {
  if ( getData() == null || getData().linesReadStepNr == -1 ) {
    return 0;
  }
  return getData().getMappingTrans().getSteps().get( getData().linesReadStepNr ).step.getLinesRead();
}
/**
 * Delegates to the designated "lines rejected" step of the mapping
 * transformation, or returns 0 when no step was designated.
 */
@Override
public long getLinesRejected() {
  if ( getData() == null || getData().linesRejectedStepNr == -1 ) {
    return 0;
  }
  return getData().getMappingTrans().getSteps().get( getData().linesRejectedStepNr ).step.getLinesRejected();
}
// When the transformation finishes with errors, capture the log buffer of the
// first copy (copy 0) of every step and store it in previewLogMap, keyed by
// the step's metadata, so the preview dialog can display it.
@Override public void transFinished( Trans trans ) throws KettleException {
  // Copy over the data from the previewDelegate...
  //
  if ( trans.getErrors() != 0 ) {
    // capture logging and store it...
    //
    for ( StepMetaDataCombi combi : trans.getSteps() ) {
      if ( combi.copy == 0 ) {
        StringBuffer logBuffer =
          KettleLogStore.getAppender().getBuffer( combi.step.getLogChannel().getLogChannelId(), false );
        previewLogMap.put( combi.stepMeta, logBuffer );
      }
    }
  }
} } );
/**
 * Delegates to the designated "lines updated" step of the mapping
 * transformation, or returns 0 when no step was designated.
 */
@Override
public long getLinesUpdated() {
  if ( getData() == null || getData().linesUpdatedStepNr == -1 ) {
    return 0;
  }
  return getData().getMappingTrans().getSteps().get( getData().linesUpdatedStepNr ).step.getLinesUpdated();
}
/**
 * Builds the servlet under test, a capturing output writer, and a mocked
 * transformation with five step combis, none of which writes to the servlet
 * output.
 */
@Before
public void setup() throws Exception {
  runTransServlet = new RunTransServlet();

  outData = new ByteArrayOutputStream();
  out = new PrintWriter( outData );

  stepList = new ArrayList<>();
  for ( int i = 0; i < 5; i++ ) {
    // Each step's meta reports that it does NOT pass data to the servlet output.
    StepMetaInterface stepMeta = mock( StepMetaInterface.class );
    when( stepMeta.passDataToServletOutput() ).thenReturn( false );
    StepMetaDataCombi combi = new StepMetaDataCombi();
    combi.meta = stepMeta;
    stepList.add( combi );
  }
  when( trans.getSteps() ).thenReturn( stepList );
  when( trans.getContainerObjectId() ).thenReturn( transId );
}
// Verifies safe-stop semantics: when safeStop() is triggered by the first row
// written to step 1, the producer (step 0) writes exactly one row and the
// consumer (step 1) reads exactly one row — no further rows flow after stop.
@Test public void doesNotWriteRowOnTimeWhenStopped() throws KettleException, InterruptedException {
  TransMeta transMeta = new TransMeta( getClass().getResource( "safe-stop.ktr" ).getPath() );
  Trans trans = new Trans( transMeta );
  trans.prepareExecution( new String[] {} );
  // Stop the transformation safely as soon as the second step sees a row.
  trans.getSteps().get( 1 ).step.addRowListener( new RowAdapter() {
    @Override public void rowWrittenEvent( RowMetaInterface rowMeta, Object[] row ) throws KettleStepException {
      trans.safeStop();
    }
  } );
  trans.startThreads();
  trans.waitUntilFinished();
  assertEquals( 1, trans.getSteps().get( 0 ).step.getLinesWritten() );
  assertEquals( 1, trans.getSteps().get( 1 ).step.getLinesRead() );
} }
/**
 * Verifies that writing step log information triggers clean-up of old log
 * records on the log table's database.
 */
@Test
public void testRecordsCleanUpMethodIsCalled() throws Exception {
  Database db = mock( Database.class );
  Trans trans = mock( Trans.class );

  // A default step log table bound to a named connection.
  StepLogTable stepLogTable =
    StepLogTable.getDefault( mock( VariableSpace.class ), mock( HasDatabasesInterface.class ) );
  stepLogTable.setConnectionName( "connection" );

  TransMeta transMeta = new TransMeta();
  transMeta.setStepLogTable( stepLogTable );

  when( trans.getTransMeta() ).thenReturn( transMeta );
  when( trans.createDataBase( any( DatabaseMeta.class ) ) ).thenReturn( db );
  when( trans.getSteps() ).thenReturn( new ArrayList<>() );
  doCallRealMethod().when( trans ).writeStepLogInformation();

  trans.writeStepLogInformation();

  verify( db ).cleanupLogRecords( stepLogTable );
}
/**
 * Wires a BaseStreamStep into a one-step transformation: installs a mocked
 * log channel factory, builds the step with its source/window collaborators,
 * prepares the transformation, and registers the step's combi with it.
 */
@Before
public void setUp() throws KettleException {
  // Route all log channel creation through the mocked factory.
  KettleLogStore.setLogChannelInterfaceFactory( logChannelFactory );
  when( logChannelFactory.create( any(), any() ) ).thenReturn( logChannel );

  StepMeta stepMeta = new StepMeta( "BaseStreamStep", meta );
  TransMeta transMeta = new TransMeta();
  transMeta.addStep( stepMeta );
  Trans trans = new Trans( transMeta );

  baseStreamStep = new BaseStreamStep( stepMeta, stepData, 1, transMeta, trans );
  baseStreamStep.source = streamSource;
  baseStreamStep.window = streamWindow;
  baseStreamStep.setParentVariableSpace( new Variables() );

  // Assemble the combi that ties step, data and metadata together.
  StepMetaDataCombi combi = new StepMetaDataCombi();
  combi.step = baseStreamStep;
  combi.data = stepData;
  combi.stepMeta = stepMeta;
  combi.meta = meta;

  trans.prepareExecution( new String[ 0 ] );
  trans.getSteps().add( combi );
}
/** * This checks transformation initialization when using one to many copies * * @throws KettleException */ @Test public void testOneToManyCopies() throws KettleException { prepareStepMetas_1_x2(); trans.prepareExecution( new String[] {} ); List<RowSet> rowsets = trans.getRowsets(); assertTrue( !rowsets.isEmpty() ); assertEquals( "We have 2 rowsets finally", 2, rowsets.size() ); assertEquals( "We have 3 steps: one producer and 2 copies of consumer", 3, trans.getSteps().size() ); // Ok, examine initialized steps now. StepInterface stepOne = getStepByName( S10 ); assertTrue( "1 step have no input row sets", stepOne.getInputRowSets().isEmpty() ); assertEquals( "1 step have 2 output rowsets", 2, stepOne.getOutputRowSets().size() ); StepInterface stepTwo0 = getStepByName( S20 ); Assert.assertEquals( "2.0 step have 12 input row sets", 1, stepTwo0.getInputRowSets().size() ); Assert.assertTrue( "2.0 step have no output row sets", stepTwo0.getOutputRowSets().isEmpty() ); StepInterface stepTwo1 = getStepByName( S21 ); Assert.assertEquals( "2.1 step have 1 input row sets", 1, stepTwo1.getInputRowSets().size() ); Assert.assertTrue( "2.1 step have no output row sets", stepTwo1.getOutputRowSets().isEmpty() ); }
/** * Test one to one partitioning step transformation organization. * * @throws KettleException */ @Test public void testOneToPartitioningSchema() throws KettleException { prepareStepMetas_1_cl1(); trans.prepareExecution( new String[] {} ); List<RowSet> rowsets = trans.getRowsets(); assertTrue( !rowsets.isEmpty() ); assertEquals( "We have 2 rowsets finally", 2, rowsets.size() ); assertEquals( "We have 3 steps: 1 producer and 2 copies of consumer since it is partitioned", 3, trans.getSteps() .size() ); // Ok, examine initialized steps now. StepInterface stepOne0 = getStepByName( S10 ); assertTrue( "1 step have no input row sets", stepOne0.getInputRowSets().isEmpty() ); assertEquals( "1 step have 2 output rowsets", 2, stepOne0.getOutputRowSets().size() ); StepInterface stepTwo0 = getStepByName( SP20 ); assertEquals( "2.0 step have one input row sets", 1, stepTwo0.getInputRowSets().size() ); assertTrue( "2.0 step have no output rowsets", stepTwo0.getOutputRowSets().isEmpty() ); StepInterface stepTwo1 = getStepByName( SP21 ); Assert.assertEquals( "2.1 step have 1 input row sets", 1, stepTwo1.getInputRowSets().size() ); Assert.assertTrue( "2.1 step have no output row sets", stepTwo1.getOutputRowSets().isEmpty() ); }
/** * This checks transformation initialization when using many copies to one next step * * @throws KettleException */ @Test public void testManyToOneCopies() throws KettleException { prepareStepMetas_x2_1(); trans.prepareExecution( new String[] {} ); List<RowSet> rowsets = trans.getRowsets(); assertTrue( !rowsets.isEmpty() ); assertEquals( "We have 2 rowsets finally", 2, rowsets.size() ); assertEquals( "We have 4 steps: 2 copies of producer and 2 copies of consumer", 3, trans.getSteps().size() ); // Ok, examine initialized steps now. StepInterface stepOne0 = getStepByName( S10 ); assertTrue( "1 step have no input row sets", stepOne0.getInputRowSets().isEmpty() ); assertEquals( "1 step have 1 output rowsets", 1, stepOne0.getOutputRowSets().size() ); StepInterface stepOne1 = getStepByName( S11 ); assertTrue( "1 step have no input row sets", stepOne1.getInputRowSets().isEmpty() ); assertEquals( "1 step have 1 output rowsets", 1, stepOne1.getOutputRowSets().size() ); StepInterface stepTwo0 = getStepByName( S20 ); Assert.assertEquals( "2.0 step have 2 input row sets", 2, stepTwo0.getInputRowSets().size() ); Assert.assertTrue( "2.0 step have no output row sets", stepTwo0.getOutputRowSets().isEmpty() ); }