private List<RunConfiguration> createSlaveServerRunConfigurations( List<String> existingConfigurationNames,
                                                                   AbstractMeta abstractMeta ) {
  List<RunConfiguration> runConfigurations = new ArrayList<>();
  if ( abstractMeta instanceof JobMeta ) {
    JobMeta jobMeta = (JobMeta) abstractMeta;
    // Group transformation job entries that point at a remote slave server
    // but do not yet have a run configuration assigned.
    Map<String, List<JobEntryTrans>> slaveServerGroups = jobMeta.getJobCopies().stream()
      .map( JobEntryCopy::getEntry )
      .filter( entry -> entry instanceof JobEntryTrans )
      .map( entry -> (JobEntryTrans) entry )
      .filter( entry -> Utils.isEmpty( entry.getRunConfiguration() ) )
      .filter( entry -> !Utils.isEmpty( entry.getRemoteSlaveServerName() ) )
      .collect( Collectors.groupingBy( JobEntryTrans::getRemoteSlaveServerName ) );

    // Create one run configuration per remote slave server and assign it
    // to every grouped entry.
    slaveServerGroups.forEach( ( remoteServerName, entries ) -> {
      String runConfigurationName = createRunConfigurationName( existingConfigurationNames, remoteServerName );
      DefaultRunConfiguration runConfiguration = createRunConfiguration( runConfigurationName, remoteServerName );
      runConfigurations.add( runConfiguration );
      entries.forEach( e -> e.setRunConfiguration( runConfiguration.getName() ) );
    } );
  }
  return runConfigurations;
}
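// --- A minimal, self-contained sketch of the filter-then-groupingBy idiom used above.
// --- "Entry" is a hypothetical stand-in for JobEntryTrans; its names and fields are
// --- illustrative only, not part of the Kettle API.
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class GroupByServerSketch {
  static class Entry {
    final String name;
    final String runConfig;     // null/empty means "no run configuration yet"
    final String remoteServer;  // null/empty means "not a remote execution"
    Entry( String name, String runConfig, String remoteServer ) {
      this.name = name;
      this.runConfig = runConfig;
      this.remoteServer = remoteServer;
    }
  }

  public static void main( String[] args ) {
    List<Entry> entries = Arrays.asList(
      new Entry( "t1", null, "carte1" ),  // no run config, remote server set -> grouped
      new Entry( "t2", "rc", "carte1" ),  // already has a run config -> filtered out
      new Entry( "t3", null, null ),      // not remote -> filtered out
      new Entry( "t4", null, "carte2" ) );

    // Same shape as the pipeline above: two filters, then group by server name.
    Map<String, List<Entry>> byServer = entries.stream()
      .filter( e -> e.runConfig == null || e.runConfig.isEmpty() )
      .filter( e -> e.remoteServer != null && !e.remoteServer.isEmpty() )
      .collect( Collectors.groupingBy( e -> e.remoteServer ) );

    // Prints e.g. "carte1 -> [t1]" and "carte2 -> [t4]" (map order may vary).
    byServer.forEach( ( server, group ) -> System.out.println( server + " -> "
      + group.stream().map( e -> e.name ).collect( Collectors.toList() ) ) );
  }
}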
private Set<DatabaseMeta> getUsedDatabaseMetas() {
  Set<DatabaseMeta> databaseMetas = new HashSet<>();
  // Collect the connections used by every job entry copy.
  for ( JobEntryCopy jobEntryCopy : getJobCopies() ) {
    DatabaseMeta[] dbs = jobEntryCopy.getEntry().getUsedDatabaseConnections();
    if ( dbs != null ) {
      for ( DatabaseMeta db : dbs ) {
        databaseMetas.add( db );
      }
    }
  }
  // Also include the connections used by the job log table and any extra log tables.
  databaseMetas.add( jobLogTable.getDatabaseMeta() );
  for ( LogTableInterface logTable : getExtraLogTables() ) {
    databaseMetas.add( logTable.getDatabaseMeta() );
  }
  return databaseMetas;
}
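// --- Note: if a log table has no connection configured, getDatabaseMeta() may return
// --- null (an assumption, suggested by this excerpt's own null-guard on the entry
// --- connections), in which case the set above would hold a null element.
// --- A minimal sketch of a null-dropping variant of the collection step:
import java.util.HashSet;
import java.util.Set;

public class NullSafeSetSketch {
  // Add a value to the set only when it is non-null -- the guard the
  // log-table additions above may want.
  static <T> void addIfNotNull( Set<T> set, T value ) {
    if ( value != null ) {
      set.add( value );
    }
  }

  public static void main( String[] args ) {
    Set<String> metas = new HashSet<>();
    addIfNotNull( metas, "db1" );
    addIfNotNull( metas, null );   // silently dropped instead of polluting the set
    addIfNotNull( metas, "db1" );  // HashSet dedups repeated connections
    System.out.println( metas );   // [db1]
  }
}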
private void patchJobEntries( JobMeta jobMeta ) {
  for ( JobEntryCopy copy : jobMeta.getJobCopies() ) {
    JobEntryInterface jobEntryInterface = copy.getEntry();
    if ( jobEntryInterface instanceof HasRepositoryDirectories ) {
      patchRepositoryDirectories( jobEntryInterface.isReferencedObjectEnabled(),
        (HasRepositoryDirectories) jobEntryInterface );
    }
  }
}
@Override
JobMeta createJobMetaForNode( Node jobnode ) throws KettleXMLException {
  // Stub a JobMeta whose single job entry copy is a transformation entry.
  JobMeta meta = mock( JobMeta.class );
  JobEntryCopy jec = mock( JobEntryCopy.class );
  when( jec.isTransformation() ).thenReturn( true );
  when( jec.getEntry() ).thenReturn( jobEntryInterface );
  when( meta.getJobCopies() ).thenReturn( Collections.singletonList( jec ) );
  return meta;
}
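// --- A hedged, self-contained illustration of the stubbing pattern above: mock the
// --- container, stub the copies list to a singleton. "Meta" and "Copy" are
// --- hypothetical stand-ins for JobMeta and JobEntryCopy; only Mockito's standard
// --- mock()/when() API is used.
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.Collections;
import java.util.List;

public class StubbingSketch {
  interface Copy {
    boolean isTransformation();
  }

  interface Meta {
    List<Copy> getCopies();
  }

  public static void main( String[] args ) {
    Copy copy = mock( Copy.class );
    when( copy.isTransformation() ).thenReturn( true );

    Meta meta = mock( Meta.class );
    when( meta.getCopies() ).thenReturn( Collections.singletonList( copy ) );

    // Code under test can now iterate the stubbed list as if it were real metadata.
    System.out.println( meta.getCopies().get( 0 ).isTransformation() );  // true
  }
}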
public void getInfo() {
  try {
    getConfiguration().setExpandingRemoteJob( wExpandRemote.getSelection() );
    getConfiguration().setRunConfiguration( wRunConfiguration.getText() );

    // various settings
    // configuration.setReplayDate( null );
    configuration.setSafeModeEnabled( wSafeMode.getSelection() );
    configuration.setClearingLog( wClearLog.getSelection() );
    configuration.setLogLevel( LogLevel.values()[wLogLevel.getSelectionIndex()] );

    String startCopyName = null;
    int startCopyNr = 0;
    if ( !Utils.isEmpty( wStartCopy.getText() ) && wStartCopy.getSelectionIndex() >= 0 ) {
      JobEntryCopy copy = ( (JobMeta) abstractMeta ).getJobCopies().get( wStartCopy.getSelectionIndex() );
      startCopyName = copy.getName();
      startCopyNr = copy.getNr();
    }
    getConfiguration().setStartCopyName( startCopyName );
    getConfiguration().setStartCopyNr( startCopyNr );

    // The lower part of the dialog...
    getInfoParameters();
    getInfoVariables();

    // Metrics
    configuration.setGatheringMetrics( wGatherMetrics.getSelection() );
  } catch ( Exception e ) {
    new ErrorDialog( shell, "Error in settings", "There is an error in the dialog settings", e );
  }
}
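// --- LogLevel.values()[wLogLevel.getSelectionIndex()] maps the combo's selection
// --- index straight onto the enum's declaration order, which assumes the combo was
// --- populated in that same order and that something is selected. A minimal sketch
// --- of the idiom with a defensive bounds check ("Level" is a hypothetical enum):
public class EnumIndexSketch {
  enum Level { ERROR, MINIMAL, BASIC, DETAILED, DEBUG }  // illustrative ordering only

  static Level fromSelectionIndex( int index ) {
    // Fall back to a default when nothing is selected (-1) or the index is stale.
    if ( index < 0 || index >= Level.values().length ) {
      return Level.BASIC;
    }
    return Level.values()[index];
  }

  public static void main( String[] args ) {
    System.out.println( fromSelectionIndex( 4 ) );   // DEBUG
    System.out.println( fromSelectionIndex( -1 ) );  // BASIC (guarded fallback)
  }
}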
String[] names = new String[jobMeta.getJobCopies().size()];
for ( int i = 0; i < names.length; i++ ) {
  JobEntryCopy copy = jobMeta.getJobCopies().get( i );
  names[i] = getJobEntryCopyName( copy );
/**
 * Write job entry log information.
 *
 * @throws KettleException
 *           the kettle exception
 */
protected void writeJobEntryLogInformation() throws KettleException {
  Database db = null;
  JobEntryLogTable jobEntryLogTable = getJobMeta().getJobEntryLogTable();
  try {
    db = createDataBase( jobEntryLogTable.getDatabaseMeta() );
    db.shareVariablesWith( this );
    db.connect();
    db.setCommit( logCommitSize );

    for ( JobEntryCopy copy : getJobMeta().getJobCopies() ) {
      db.writeLogRecord( jobEntryLogTable, LogStatus.START, copy, this );
    }

    db.cleanupLogRecords( jobEntryLogTable );
  } catch ( Exception e ) {
    throw new KettleException( BaseMessages.getString( PKG,
      "Job.Exception.UnableToJobEntryInformationToLogTable" ), e );
  } finally {
    // Guard against db being null when createDataBase() itself throws.
    if ( db != null ) {
      if ( !db.isAutoCommit() ) {
        db.commitLog( true, jobEntryLogTable );
      }
      db.disconnect();
    }
  }
}
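// --- The finally block above must null-check db because createDataBase() can throw
// --- before db is assigned. A minimal generic sketch of that connect/cleanup
// --- pattern ("Resource" and "open" are hypothetical stand-ins):
public class CleanupSketch {
  static class Resource {
    void disconnect() {
      System.out.println( "disconnected" );
    }
  }

  static Resource open( boolean fail ) {
    if ( fail ) {
      throw new IllegalStateException( "could not open" );
    }
    return new Resource();
  }

  static void work( boolean fail ) {
    Resource r = null;
    try {
      r = open( fail );  // if this throws, r stays null
    } catch ( Exception e ) {
      System.out.println( "handled: " + e.getMessage() );
    } finally {
      if ( r != null ) {  // the same guard writeJobEntryLogInformation() needs
        r.disconnect();
      }
    }
  }

  public static void main( String[] args ) {
    work( false );  // disconnected
    work( true );   // handled: could not open (and no NPE in finally)
  }
}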
jobMeta.getJobCopies().add( copy );

if ( jobMeta.getJobCopies().size() != nrCopies ) {
  throw new KettleException( "The number of job entry copies read [" + jobMeta.getJobCopies().size()
    + "] was not the number we expected [" + nrCopies + "]" );
for ( JobEntryCopy copy : jobMeta.getJobCopies() ) {
  results.add( new JobEntryCopyResult( copy.getName(), null, null, copy.getNr() ) );
jobMeta.setFilename( jobMetaFilename );
for ( JobEntryCopy copy : jobMeta.getJobCopies() ) {
  JobEntryInterface entry = copy.getEntry();
  if ( entry instanceof JobEntryTrans ) {
for ( JobEntryCopy copy : jobMeta.getJobCopies() ) {
  if ( jec.getEntry() == copy.getEntry() ) {
    copyNr++;

// (distinct fragment: hop loading; the first line is a truncated progress-message remnant)
// ... + ( i + 1 ) + "/" + ( jecids.length ) );
JobHopMeta hi = loadJobHopMeta( hopid[i], jobMeta.getJobCopies() );
jobMeta.getJobhops().add( hi );
if ( monitor != null ) {