/**
 * Verifies that when {@code Job.execute(...)} throws a KettleException, the
 * runner still records a Result on the job — the failure path must always
 * publish a result exactly once.
 */
@Test public void testRunWithExceptionOnExecuteSetsResult() throws Exception {
  // Neither the job nor its parent is stopped, so run() proceeds to execute().
  when( mockJob.isStopped() ).thenReturn( false );
  when( mockJob.getParentJob() ).thenReturn( parentJob );
  when( parentJob.isStopped() ).thenReturn( false );

  // Force the execution itself to fail.
  doThrow( KettleException.class ).when( mockJob ).execute( anyInt(), any( Result.class ) );

  jobRunner.run();

  // Even on failure, a Result must be pushed back onto the job exactly once.
  verify( mockJob, times( 1 ) ).setResult( any( Result.class ) );
}
/**
 * Returns the name of this job, fulfilling the named-object contract.
 *
 * @return the job name as reported by {@link #getJobname()}
 */
public String getObjectName() {
  return getJobname();
}
// NOTE(review): this chunk appears truncated/garbled — brace counts do not
// balance (the else-branch is never closed and the catch has no matching
// close brace), so the comments below describe apparent intent only; verify
// against the complete file before relying on them.
CarteObjectEntry entry = CarteResource.getCarteObjectEntry( id );
try {
  // Only restart a job that finished initializing and is no longer running.
  if ( job.isInitialized() && !job.isActive() ) {
    // Reconnect the repository if the previous run left it disconnected.
    if ( job.getRep() != null && !job.getRep().isConnected() ) {
      if ( job.getRep().getUserInfo() != null ) {
        // Reuse the stored credentials for the reconnect.
        job.getRep().connect( job.getRep().getUserInfo().getLogin(),
          job.getRep().getUserInfo().getPassword() );
      } else {
        // No stored credentials — anonymous connect.
        job.getRep().connect( null, null );
        // NOTE(review): the statements below look like they belong OUTSIDE
        // this else-branch in the full source (extraction artifact).
        servletLoggingObject.setContainerObjectId( carteObjectId );
        // Build a fresh Job instance so the restart gets clean state/logging.
        Job newJob = new Job( job.getRep(), job.getJobMeta(), servletLoggingObject );
        newJob.setLogLevel( job.getLogLevel() );
        // Drop the previous run's log lines before starting again.
        KettleLogStore.discardLines( job.getLogChannelId(), true );
        job.start();
} catch ( KettleException e ) {
  // NOTE(review): printStackTrace() bypasses the Kettle logging system;
  // consider routing this through the servlet's log channel instead.
  e.printStackTrace();
/**
 * Attaches this entry to its parent {@link Job}, inheriting the parent's
 * container object id and log level, and creating a log channel whose parent
 * is the given job.
 *
 * @param parentJob
 *          the new parent job
 */
public void setParentJob( Job parentJob ) {
  this.parentJob = parentJob;
  // Share the parent's Carte container id so both appear under one execution.
  this.containerObjectId = parentJob.getContainerObjectId();
  // Mirror the parent's logging configuration on this entry.
  this.logLevel = parentJob.getLogLevel();
  this.log = new LogChannel( this, parentJob );
}
/**
 * Builds a human-readable status string for this job: halting, running,
 * finished, stopped, or waiting. Finished/stopped states are suffixed with
 * " (with errors)" when the job result recorded errors.
 *
 * @return the current status message
 */
public String getStatus() {
  String message;
  if ( isActive() ) {
    // An active job that was asked to stop is "halting" until threads end.
    if ( isStopped() ) {
      message = Trans.STRING_HALTING;
    } else {
      message = Trans.STRING_RUNNING;
    }
  } else if ( isFinished() ) {
    message = appendErrorSuffix( Trans.STRING_FINISHED );
  } else if ( isStopped() ) {
    message = appendErrorSuffix( Trans.STRING_STOPPED );
  } else {
    message = Trans.STRING_WAITING;
  }
  return message;
}

/**
 * Appends the " (with errors)" marker when the job result recorded errors.
 * Guards against a null result: a job can be flagged finished/stopped before
 * its result is published, which previously risked a NullPointerException.
 */
private String appendErrorSuffix( String message ) {
  if ( getResult() != null && getResult().getNrErrors() > 0 ) {
    message += " (with errors)";
  }
  return message;
}
/**
 * Registers a job and its configuration under a freshly generated container
 * object id, so multiple jobs with the same name can coexist in the maps.
 *
 * @param job the job to register
 * @param jobConfiguration the configuration the job was started with
 */
public synchronized void registerJob( Job job, JobConfiguration jobConfiguration ) {
  // Each registration gets a unique container id; it doubles as the map key.
  String containerId = UUID.randomUUID().toString();
  job.setContainerObjectId( containerId );

  CarteObjectEntry mapKey = new CarteObjectEntry( job.getJobMeta().getName(), containerId );
  jobMap.put( mapKey, job );
  configurationMap.put( mapKey, jobConfiguration );
}
// NOTE(review): this excerpt is truncated and garbled — several statements are
// cut mid-call (the logBasic(...) argument list, the orphaned
// ".RunConfigurationSelection.id, items );" tail) and braces do not balance.
// Comments describe apparent intent only; confirm against the complete file.
LogLevel jobLogLevel = parentJob.getLogLevel();
if ( parentJob.getJobMeta() != null ) {
  // Refresh internal variables so the child job sees the parent's context.
  parentJob.getJobMeta().setInternalKettleVariables();
  args1 = parentJob.getArguments();
  // Walk every parent parameter, reading default, current value, description.
  String[] parentParameters = parentJob.listParameters();
  for ( int idx = 0; idx < parentParameters.length; idx++ ) {
    String par = parentParameters[idx];
    String def = parentJob.getParameterDefault( par );
    String val = parentJob.getParameterValue( par );
    String des = parentJob.getParameterDescription( par );
try {
  // Let extension points inspect/adjust the execution before the job starts.
  ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.SpoonTransBeforeStart.id,
    new Object[] { executionConfiguration, parentJob.getJobMeta(), jobMeta, rep } );
  List<Object> items = Arrays.asList( runConfiguration, false );
  // NOTE(review): orphaned argument tail of a callExtensionPoint(...) call.
  .RunConfigurationSelection.id, items );
  if ( waitingToFinish && (Boolean) items.get( IS_PENTAHO ) ) {
    String jobName = parentJob.getJobMeta().getName();
    String name = jobMeta.getName();
    // NOTE(review): logBasic(...) call is cut off mid-argument-list here.
    logBasic( BaseMessages.getString( PKG, "JobJob.Log.InvalidRunConfigurationCombination", jobName,
// Resolve the remote slave server by its (variable-substituted) name.
remoteSlaveServer = parentJob.getJobMeta().findSlaveServer( realRemoteSlaveServerName );
if ( remoteSlaveServer == null ) {
// NOTE(review): truncated excerpt — the try block and loops are never closed;
// comments describe intent only.
// Re-launch only when there is no previous job, or it has completely ended.
if ( job == null || ( job.isFinished() || job.isStopped() ) && !job.isActive() ) {
  // NOTE(review): the "job != null &&" test is redundant inside this branch,
  // and discardLines below would NPE when job is null — verify this guard in
  // the full file.
  if ( job == null || ( job != null && !job.isActive() ) ) {
    try {
      // Drop the previous run's log buffer before re-running.
      KettleLogStore.discardLines( job.getLogChannelId(), true );
      spoonLoggingObject.setContainerObjectId( spoonObjectId );
      spoonLoggingObject.setLogLevel( executionConfiguration.getLogLevel() );
      // Fresh Job instance carrying the Spoon logging context.
      job = new Job( spoon.rep, runJobMeta, spoonLoggingObject );
      job.setLogLevel( executionConfiguration.getLogLevel() );
      job.shareVariablesWith( jobMeta );
      job.setInteractive( true );
      job.setGatheringMetrics( executionConfiguration.isGatheringMetrics() );
      job.setArguments( executionConfiguration.getArgumentStrings() );
      job.getExtensionDataMap().putAll( executionConfiguration.getExtensionOptions() );
      // Refresh the UI as individual job entries run.
      job.addJobEntryListener( createRefreshJobEntryListener() );
      // NOTE(review): the result of findJobEntry(...) is not assigned here —
      // a "JobEntryCopy startJobEntryCopy =" prefix appears lost in extraction.
      runJobMeta.findJobEntry( executionConfiguration.getStartCopyName(), executionConfiguration
        .getStartCopyNr(), false );
      job.setStartJobEntryCopy( startJobEntryCopy );
      // Push configured parameter values into the job (empty string for null).
      Set<String> keys = paramMap.keySet();
      for ( String key : keys ) {
        job.getJobMeta().setParameterValue( key, Const.NVL( paramMap.get( key ), "" ) );
// NOTE(review): truncated excerpt — the parameter loop body and the
// JobAdapter anonymous class are cut off; braces do not balance.
final Job job = new Job( repository, jobMeta, servletLoggingObject );
job.initializeVariablesFrom( null );
job.getJobMeta().setInternalKettleVariables( job );
// Apply the variables and arguments supplied with the execution request.
job.injectVariables( jobConfiguration.getJobExecutionConfiguration().getVariables() );
job.setArguments( jobExecutionConfiguration.getArgumentStrings() );
// Start from the meta's parameter definitions, then clear current values.
job.copyParametersFrom( jobMeta );
job.clearParameters();
String[] parameterNames = job.listParameters();
for ( int idx = 0; idx < parameterNames.length; idx++ ) {
// Sockets for inter-server data passing are shared via the Carte singleton.
job.setSocketRepository( CarteSingleton.getInstance().getSocketRepository() );
CarteSingleton.getInstance().getJobMap().addJob( job.getJobname(), carteObjectId, job, jobConfiguration );
// Release the repository connection once the job finishes.
job.addJobListener( new JobAdapter() {
  public void jobFinished( Job job ) {
    repository.disconnect();
// NOTE(review): truncated/garbled excerpt — an orphaned condition tail
// "&& !parentJob.isStopped() ) {", a cut logBasic(...) call, and an orphaned
// ".RunConfigurationSelection.id, items );" argument tail. Intent-only notes.
LogLevel transLogLevel = parentJob.getLogLevel();
String[] args1 = arguments;
if ( args1 == null || args1.length == 0 ) { // No arguments set, look at the parent job.
  args1 = parentJob.getArguments();
&& !parentJob.isStopped() ) {
  try {
    // Give extension points a chance to adjust the execution before start.
    ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.SpoonTransBeforeStart.id,
      new Object[] { executionConfiguration, parentJob.getJobMeta(), transMeta, rep } );
    List<Object> items = Arrays.asList( runConfiguration, false );
    // NOTE(review): orphaned argument tail of a callExtensionPoint(...) call.
    .RunConfigurationSelection.id, items );
    if ( waitingToFinish && (Boolean) items.get( IS_PENTAHO ) ) {
      String jobName = parentJob.getJobMeta().getName();
      String name = transMeta.getName();
      // NOTE(review): logBasic(...) call is cut off mid-argument-list here.
      logBasic( BaseMessages.getString( PKG, "JobTrans.Log.InvalidRunConfigurationCombination", jobName,
// Resolve the remote slave server by name; fail when it is unknown.
remoteSlaveServer = parentJob.getJobMeta().findSlaveServer( realRemoteSlaveServerName );
if ( remoteSlaveServer == null ) {
  throw new KettleException( BaseMessages.getString(
// Propagate the parent's batch id when the job meta asks for it.
if ( parentJob.getJobMeta().isBatchIdPassed() ) {
  executionConfiguration.setPassedBatchId( parentJob.getPassedBatchId() );
// NOTE(review): duplicate of the batch-id block above — likely two distinct
// call sites merged by extraction.
if ( parentJob.getJobMeta().isBatchIdPassed() ) {
  executionConfiguration.setPassedBatchId( parentJob.getPassedBatchId() );
// NOTE(review): truncated excerpt — the JobAdapter anonymous class is not
// closed; the trailing addJob/setPassedBatchId lines likely sit outside it in
// the full source.
final Job job = new Job( repository, jobMeta, servletLoggingObject );
job.initializeVariablesFrom( null );
// Wire the slave server's metastore into the job's meta before variable setup.
job.getJobMeta().setMetaStore( jobMap.getSlaveServerConfig().getMetaStore() );
job.getJobMeta().setInternalKettleVariables( job );
job.injectVariables( jobConfiguration.getJobExecutionConfiguration().getVariables() );
job.setArguments( jobExecutionConfiguration.getArgumentStrings() );
job.setSocketRepository( getSocketRepository() );
// Optionally begin execution at a specific entry copy.
int startCopyNr = jobExecutionConfiguration.getStartCopyNr();
JobEntryCopy startJobEntryCopy = jobMeta.findJobEntry( startCopyName, startCopyNr, false );
job.setStartJobEntryCopy( startJobEntryCopy );
// Route sub-transformations/jobs spawned by this job into the Carte maps.
job.addDelegationListener( new CarteDelegationHandler( getTransformationMap(), getJobMap() ) );
// Disconnect the repository once the job finishes.
job.addJobListener( new JobAdapter() {
  public void jobFinished( Job job ) {
    repository.disconnect();
getJobMap().addJob( job.getJobname(), carteObjectId, job, jobConfiguration );
job.setPassedBatchId( passedBatchId );
// NOTE(review): truncated excerpt — the anonymous JobAdapter and surrounding
// try/catch are cut; "executionException" is defined outside this view.
final Job job = new Job( repository, jobMeta, servletLoggingObject );
job.initializeVariablesFrom( null );
job.getJobMeta().setInternalKettleVariables( job );
job.injectVariables( jobConfiguration.getJobExecutionConfiguration().getVariables() );
job.copyParametersFrom( jobMeta );
job.clearParameters();
job.setSocketRepository( getSocketRepository() );
// Make the job discoverable by name + unique Carte object id.
jobMap.addJob( job.getJobname(), carteObjectId, job, jobConfiguration );
// Disconnect the repository when the job ends.
job.addJobListener( new JobAdapter() {
  public void jobFinished( Job job ) {
    repository.disconnect();
String message = "Job '" + job.getJobname() + "' was added to the list with id " + carteObjectId;
logBasic( message );
// Surface the job's captured log lines in the thrown exception for diagnosis.
String logging = KettleLogStore.getAppender().getBuffer( job.getLogChannelId(), false ).toString();
throw new KettleException( "Error executing Job: " + logging, executionException );
// NOTE(review): truncated/garbled excerpt — a logError(...) call is cut off
// and several fragments are merged; comments describe intent only.
// Mirror job logging into a dedicated file appender.
appender = LogWriter.createFileAppender( logFileName, true, false );
LogWriter.getInstance().addAppender( appender );
log.setLogLevel( parentJob.getLogLevel() );
} catch ( Exception e ) {
  logError( BaseMessages
logDetailed( BaseMessages
  .getString( JobEntryHadoopJobExecutor.class, "JobEntryHadoopJobExecutor.Blocking", mainClass ) );
// Poll until the Hadoop job completes or the parent job is stopped.
} while ( !parentJob.isStopped() && !done );
if ( !done ) {
  // The loop ended before completion — kill the still-running MapReduce job.
  mapReduceJobSimple.killJob();
if ( parentJob.isStopped() && !mapReduceJobAdvanced.isComplete() ) {
// Register the log file as a result file of this entry.
// NOTE(review): a "ResultFile resultFile =" prefix appears lost here — the
// constructed object is referenced as "resultFile" on the next line.
new ResultFile( ResultFile.FILE_TYPE_LOG, appender.getFile(), parentJob.getJobname(), getName() );
result.getResultFiles().put( resultFile.getFile().toString(), resultFile );
@Test public void testRunWithException() throws Exception { when( mockJob.isStopped() ).thenReturn( false ); when( mockJob.getParentJob() ).thenReturn( parentJob ); when( mockJob.getJobMeta() ).thenReturn( mockJobMeta ); when( parentJob.isStopped() ).thenReturn( false ); doThrow( KettleException.class ).when( mockJob ).execute( anyInt(), any( Result.class ) ); jobRunner.run(); verify( mockResult, times( 1 ) ).setNrErrors( Mockito.anyInt() ); //[PDI-14981] catch more general exception to prevent thread hanging doThrow( Exception.class ).when( mockJob ).fireJobFinishListeners(); jobRunner.run(); }
// NOTE(review): truncated excerpt — the early-return guard is never closed and
// the logError line below has lost its enclosing catch header. Note also that
// in the collapsed original, the inline "// Fire the start listeners" comment
// swallowed the following statement; the newline is restored here.
public void run() {
  try {
    // Nothing to do when this job, or any parent that owns it, was stopped.
    if ( job.isStopped() || ( job.getParentJob() != null && job.getParentJob().isStopped() ) ) {
      return;
    job.fireJobStartListeners(); // Fire the start listeners
    result = job.execute( entryNr + 1, result );
  } catch ( KettleException e ) {
    // NOTE(review): printStackTrace() bypasses the Kettle logging system —
    // this should be routed through the job's log channel.
    e.printStackTrace();
  } finally {
    // Always publish the (possibly partial) result back onto the job.
    job.setResult( result );
    try {
      ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.JobFinish.id, getJob() );
      // Tear down the embedded metastore before notifying finish listeners.
      job.getJobMeta().disposeEmbeddedMetastoreProvider();
      log.logDebug( BaseMessages.getString( PKG, "Job.Log.DisposeEmbeddedMetastore" ) );
      job.fireJobFinishListeners();
// NOTE(review): the following lines reference a catch variable "e" whose
// catch header is outside this view.
log.logError( BaseMessages.getString( PKG, "Job.Log.ErrorExecJob", e.getMessage() ), e );
job.setFinished( true );
// NOTE(review): truncated excerpt — two catch fragments are merged and the
// output-row lines at the end sit outside any visible enclosing block.
// Configure the embedded executor job to mirror this transformation's context.
data.executorJob.shareVariablesWith( data.executorJobMeta );
data.executorJob.setParentTrans( getTrans() );
data.executorJob.setLogLevel( getLogLevel() );
data.executorJob.setInternalKettleVariables( this );
data.executorJob.copyParametersFrom( data.executorJobMeta );
data.executorJob.setArguments( getTrans().getArguments() );
// Feed the buffered input group as source rows of the executor job.
data.executorJob.setSourceRows( data.groupBuffer );
data.executorJob.beginProcessing();
result = data.executorJob.execute( 0, result );
} catch ( KettleException e ) {
  log.logError( "An error occurred executing the job: ", e );
try {
  // Finish hooks: extension point, embedded metastore teardown, listeners.
  ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.JobFinish.id, data.executorJob );
  getExecutorJob().getJobMeta().disposeEmbeddedMetastoreProvider();
  log.logDebug( BaseMessages.getString( PKG, "JobExecutor.Log.DisposeEmbeddedMetastore" ) );
  data.executorJob.fireJobFinishListeners();
} catch ( KettleException e ) {
  result.setNrErrors( 1 );
// Capture the executor job's log text and channel id into the output row.
String channelId = data.executorJob.getLogChannelId();
String logText = KettleLogStore.getAppender().getBuffer( channelId, false ).toString();
outputRow[idx++] = logText;
outputRow[idx++] = data.executorJob.getLogChannelId();
// NOTE(review): truncated excerpt — the parameter loop and the option-name
// conditional are never closed; comments describe intent only.
// Export the job and its dependencies into an archive in the export repo.
TopLevelResource topLevelResource =
  ResourceUtil.serializeResourceExportInterface( exportRepo, job.getJobMeta(), job, repository, getMetaStore() );
String launchFile = topLevelResource.getResourceName();
String message = ResourceUtil.getExplanation( exportRepo, launchFile, job.getJobMeta() );
System.out.println();
System.out.println( message );
// NOTE(review): "arguments != null ? arguments : null" always yields
// "arguments" — the ternary is a no-op.
job.setArguments( arguments != null ? arguments : null );
job.initializeVariablesFrom( null );
job.setLogLevel( getLog().getLogLevel() );
job.getJobMeta().setInternalKettleVariables( job );
job.setRepository( repository );
job.getJobMeta().setRepository( repository );
job.getJobMeta().setMetaStore( getMetaStore() );
// Apply command-line parameter values onto the job meta, then activate them.
String[] jobParams = job.getJobMeta().listParameters();
for ( String param : jobParams ) {
  String value = params.getParameterValue( param );
  if ( value != null ) {
    job.getJobMeta().setParameterValue( param, value );
job.copyParametersFrom( job.getJobMeta() );
job.activateParameters();
// Stash custom options into the job's extension data map.
String optionValue = customParams.getParameterValue( optionName );
if ( optionName != null && optionValue != null ) {
  job.getExtensionDataMap().put( optionName, optionValue );
job.start(); // Execute the selected job.
// NOTE(review): statement excerpt from a servlet test — the method header and
// the servlet doGet(...) invocation between setup and verify are outside this
// view.
when( mockHttpServletResponse.getOutputStream() ).thenReturn( outMock );
when( mockJobMap.findJob( id ) ).thenReturn( mockJob );
// The job name deliberately contains markup that must be HTML-escaped.
PowerMockito.when( mockJob.getJobname() ).thenReturn( ServletTestUtils.BAD_STRING_TO_TEST );
PowerMockito.when( mockJob.getLogChannel() ).thenReturn( mockLogChannelInterface );
PowerMockito.when( mockJob.getJobMeta() ).thenReturn( mockJobMeta );
PowerMockito.when( mockJob.isFinished() ).thenReturn( true );
PowerMockito.when( mockJob.getLogChannelId() ).thenReturn( logId );
PowerMockito.when( mockJobMeta.getMaximum() ).thenReturn( new Point( 10, 10 ) );
when( mockJob.getStatus() ).thenReturn( "Finished" );
// The servlet must have fetched the log channel exactly once.
verify( mockJob, times( 1 ) ).getLogChannel();
/**
 * Wires a {@code JobEntryFolderIsEmpty} into a minimal Job and prepares two
 * fixture directories: {@code emptyDir} containing nothing, and
 * {@code nonEmptyDir} holding a single file.
 *
 * @throws Exception when the temp directories/files cannot be created
 */
@Before public void setUp() throws Exception {
  // Minimal job graph: a Job wrapping a JobMeta containing the entry under test.
  job = new Job( null, new JobMeta() );
  entry = new JobEntryFolderIsEmpty();
  job.getJobMeta().addJobEntry( new JobEntryCopy( entry ) );
  entry.setParentJob( job );
  // The unused intermediate local for the mocked meta has been inlined.
  entry.setParentJobMeta( mock( JobMeta.class ) );
  job.setStopped( false );

  // Empty fixture directory. createTempDirectory's attributes parameter is
  // varargs, so the explicit "new FileAttribute<?>[0]" array is unnecessary.
  File dir = Files.createTempDirectory( "dir" ).toFile();
  dir.deleteOnExit();
  emptyDir = dir.getPath();

  // Non-empty fixture directory: one file inside.
  dir = Files.createTempDirectory( "dir" ).toFile();
  dir.deleteOnExit(); // NOTE: only removes the dir if it is empty at JVM exit
  nonEmptyDir = dir.getPath();
  File file = File.createTempFile( "existingFile", "ext", dir );
  file.deleteOnExit();
}
/**
 * StopJobServlet must HTML-encode everything it echoes back: with a hostile
 * job name/parameter, the rendered H1 must not contain unescaped markup, and
 * Encode.forHtml(...) must have been invoked at least once.
 */
@Test @PrepareForTest( { Encode.class } )
public void testStopJobServletEscapesHtmlWhenTransFound() throws ServletException, IOException {
  KettleLogStore.init();
  HttpServletRequest mockHttpServletRequest = mock( HttpServletRequest.class );
  HttpServletResponse mockHttpServletResponse = mock( HttpServletResponse.class );
  Job mockJob = mock( Job.class );
  JobMeta mockJobMeta = mock( JobMeta.class );
  LogChannelInterface mockLogChannelInterface = mock( LogChannelInterface.class );
  // NOTE(review): calling setName(...) on a Mockito mock is a no-op — the
  // hostile name actually reaches the servlet via the stubbed parameters.
  mockJob.setName( ServletTestUtils.BAD_STRING_TO_TEST );
  StringWriter out = new StringWriter();
  PrintWriter printWriter = new PrintWriter( out );
  // Spy the static Encode class so forHtml(...) invocations can be verified.
  PowerMockito.spy( Encode.class );
  when( mockHttpServletRequest.getContextPath() ).thenReturn( StopJobServlet.CONTEXT_PATH );
  // Every request parameter carries the hostile string.
  when( mockHttpServletRequest.getParameter( anyString() ) ).thenReturn( ServletTestUtils.BAD_STRING_TO_TEST );
  when( mockHttpServletResponse.getWriter() ).thenReturn( printWriter );
  when( mockJobMap.getJob( any( CarteObjectEntry.class ) ) ).thenReturn( mockJob );
  when( mockJob.getLogChannelId() ).thenReturn( ServletTestUtils.BAD_STRING_TO_TEST );
  when( mockJob.getLogChannel() ).thenReturn( mockLogChannelInterface );
  when( mockJob.getJobMeta() ).thenReturn( mockJobMeta );
  when( mockJobMeta.getMaximum() ).thenReturn( new Point( 10, 10 ) );
  stopJobServlet.doGet( mockHttpServletRequest, mockHttpServletResponse );
  // The hostile string must not survive unescaped inside the H1 tag.
  assertFalse( ServletTestUtils.hasBadText( ServletTestUtils.getInsideOfTag( "H1", out.toString() ) ) );
  // PowerMock idiom: verifyStatic(...) must be immediately followed by the
  // static call being verified — do not reorder these two lines.
  PowerMockito.verifyStatic( atLeastOnce() );
  Encode.forHtml( anyString() );
}
} // NOTE(review): closes the enclosing test class (header outside this view)