private JobMeta buildJobMeta( final RepositoryFile file, final RepositoryDirectoryInterface parentDir, final NodeRepositoryFileData data, final ObjectRevision revision ) throws KettleException { JobMeta jobMeta = new JobMeta(); jobMeta.setName( file.getTitle() ); jobMeta.setFilename( file.getName() ); jobMeta.setDescription( file.getDescription() ); jobMeta.setObjectId( new StringObjectId( file.getId().toString() ) ); jobMeta.setObjectRevision( revision ); jobMeta.setRepository( this ); jobMeta.setRepositoryDirectory( parentDir ); jobMeta.setMetaStore( getMetaStore() ); readJobMetaSharedObjects( jobMeta ); // This should read from the local cache jobDelegate.dataNodeToElement( data.getNode(), jobMeta ); jobMeta.clearChanged(); return jobMeta; }
wJobname.setText( Const.NVL( jobMeta.getName(), "" ) ); wJobFilename.setText( Const.NVL( jobMeta.getFilename(), "" ) ); wJobdescription.setText( Const.NVL( jobMeta.getDescription(), "" ) ); wExtendeddescription.setText( Const.NVL( jobMeta.getExtendedDescription(), "" ) ); wJobversion.setText( Const.NVL( jobMeta.getJobversion(), "" ) ); wJobstatus.select( jobMeta.getJobstatus() - 1 ); if ( jobMeta.getRepositoryDirectory() != null ) { wDirectory.setText( jobMeta.getRepositoryDirectory().getPath() ); if ( jobMeta.getCreatedUser() != null ) { wCreateUser.setText( jobMeta.getCreatedUser() ); if ( jobMeta.getCreatedDate() != null && jobMeta.getCreatedDate() != null ) { wCreateDate.setText( jobMeta.getCreatedDate().toString() ); if ( jobMeta.getModifiedUser() != null ) { wModUser.setText( jobMeta.getModifiedUser() ); if ( jobMeta.getModifiedDate() != null && jobMeta.getModifiedDate() != null ) { wModDate.setText( jobMeta.getModifiedDate().toString() ); wBatchTrans.setSelection( jobMeta.isBatchIdPassed() ); String[] parameters = jobMeta.listParameters(); for ( int idx = 0; idx < parameters.length; idx++ ) { TableItem item = wParamFields.table.getItem( idx ); description = jobMeta.getParameterDescription( parameters[idx] );
public void widgetSelected( SelectionEvent e ) { DatabaseMeta databaseMeta = jobMeta.findDatabase( wConnection.getText() ); if ( databaseMeta != null ) { // cloning to avoid spoiling data on cancel or incorrect input DatabaseMeta clone = (DatabaseMeta) databaseMeta.clone(); String connectionName = showDbDialogUnlessCancelledOrValid( clone, databaseMeta ); if ( connectionName != null ) { // need to replace the old connection with a new one jobMeta.removeDatabase( jobMeta.indexOfDatabase( databaseMeta ) ); jobMeta.addDatabase( clone ); reinitConnectionDropDown( wConnection, connectionName ); } } } }
/**
 * Checks if is file reference.
 *
 * @return true, if is file reference (i.e. not a repository reference)
 */
public boolean isFileReference() {
  // A reference is file-based exactly when it is not repository-based.
  boolean repositoryBased = isRepReference( getFilename(), this.getName() );
  return !repositoryBased;
}
/**
 * Save the parameters of this job to the repository.
 *
 * @param jobMeta the job whose declared parameters are persisted
 * @throws KettleException upon any error
 */
private void saveJobParameters( JobMeta jobMeta ) throws KettleException {
  String[] keys = jobMeta.listParameters();
  for ( int i = 0; i < keys.length; i++ ) {
    String key = keys[i];
    // Look up description first, then default value — same order as before.
    String description = jobMeta.getParameterDescription( key );
    String defaultValue = jobMeta.getParameterDefault( key );
    // Each parameter is stored under its positional index.
    insertJobParameter( jobMeta.getObjectId(), i, key, defaultValue, description );
  }
}
// NOTE(review): excerpt of a database-repository job load; the enclosing
// method signature and the closing braces lie outside this chunk.
JobMeta jobMeta = new JobMeta();
// Serialize repository access while the job's pieces are being read.
synchronized ( repository ) {
  try {
    jobMeta.clear();
    jobMeta.setRepositoryDirectory( repdir );
    // Resolve the job's object id from its name within the directory.
    jobMeta.setObjectId( getJobID( jobname, repdir.getObjectId() ) );
    if ( jobMeta.getObjectId() != null ) {
      // Gather the ids of the job's notes, entry copies and hops.
      ObjectId[] noteids = repository.getJobNoteIDs( jobMeta.getObjectId() );
      ObjectId[] jecids = repository.getJobEntryCopyIDs( jobMeta.getObjectId() );
      ObjectId[] hopid = repository.getJobHopIDs( jobMeta.getObjectId() );
      RowMetaAndData jobRow = getJob( jobMeta.getObjectId() );
      // Copy the header columns of the job row into the meta object.
      jobMeta.setName( jobRow.getString( KettleDatabaseRepository.FIELD_JOB_NAME, null ) );
      jobMeta.setDescription( jobRow.getString( KettleDatabaseRepository.FIELD_JOB_DESCRIPTION, null ) );
      jobMeta.setExtendedDescription( jobRow.getString( KettleDatabaseRepository.FIELD_JOB_EXTENDED_DESCRIPTION, null ) );
      jobMeta.setJobversion( jobRow.getString( KettleDatabaseRepository.FIELD_JOB_JOB_VERSION, null ) );
      // -1 is the fallback when the status column cannot be parsed as int.
      jobMeta.setJobstatus( Const.toInt( jobRow
        .getString( KettleDatabaseRepository.FIELD_JOB_JOB_STATUS, null ), -1 ) );
      jobMeta.setCreatedUser( jobRow.getString( KettleDatabaseRepository.FIELD_JOB_CREATED_USER, null ) );
      // Falls back to "now" when no creation date is stored.
      jobMeta.setCreatedDate( jobRow.getDate( KettleDatabaseRepository.FIELD_JOB_CREATED_DATE, new Date() ) );
// NOTE(review): discontinuous excerpt of JobMeta's XML-load logic — several
// interior lines (and matching braces) were elided, so nesting shown by the
// indentation below is approximate.
clear();
setFilename( fname );
} else {
setRepository( rep );
setName( XMLHandler.getTagValue( jobnode, "name" ) );
sharedObjectsFile = XMLHandler.getTagValue( jobnode, "shared_objects_file" );
// Shared objects come from the local file unless a repository is attached
// and repository shared objects are not being ignored.
if ( rep == null || ignoreRepositorySharedObjects ) {
  sharedObjects = readSharedObjects();
} else {
  sharedObjects = rep.readJobMetaSharedObjects( this );
importFromMetaStore();
addParameterDefinition( paramName, defValue, descr );
// Only add the parsed connection when it does not exist yet; an existing
// non-shared connection may be overwritten after prompting the user.
DatabaseMeta exist = findDatabase( dbcon.getName() );
if ( exist == null ) {
  addDatabase( dbcon );
} else {
  if ( !exist.isShared() ) {
    if ( shouldOverwrite( prompter, props, BaseMessages.getString( PKG, "JobMeta.Dialog.ConnectionExistsOverWrite.Message", dbcon.getName() ), BaseMessages.getString( PKG, "JobMeta.Dialog.ConnectionExistsOverWrite.DontShowAnyMoreMessage" ) ) ) {
      int idx = indexOfDatabase( exist );
// NOTE(review): discontinuous excerpt of JobMeta's XML serialization
// (getXML()); interior lines and closing braces were elided from this chunk.
getNamedClusterEmbedManager().clear();
retval.append( " " ).append( XMLHandler.addTagValue( "name", getName() ) );
// Emit one <parameter> element per declared named parameter.
String[] parameters = listParameters();
for ( int idx = 0; idx < parameters.length; idx++ ) {
  retval.append( " " ).append( XMLHandler.openTag( "parameter" ) ).append( Const.CR );
  try {
    retval.append( " " ) .append( XMLHandler.addTagValue( "default_value", getParameterDefault( parameters[idx] ) ) );
    retval.append( " " ) .append( XMLHandler.addTagValue( "description", getParameterDescription( parameters[idx] ) ) );
  } catch ( UnknownParamException e ) {
// Optionally restrict serialized connections to those actually used,
// depending on the user's properties setting.
Set<DatabaseMeta> usedDatabaseMetas = getUsedDatabaseMetas();
for ( int i = 0; i < nrDatabases(); i++ ) {
  DatabaseMeta dbMeta = getDatabase( i );
  if ( props != null && props.areOnlyUsedConnectionsSavedToXML() ) {
    if ( usedDatabaseMetas.contains( dbMeta ) ) {
// Append each log table's XML fragment.
for ( LogTableInterface logTable : getLogTables() ) {
  retval.append( logTable.getXML() );
// Serialize every job entry copy; the repository is set so the entry can
// resolve repository references while producing its XML.
for ( int i = 0; i < nrJobEntries(); i++ ) {
  JobEntryCopy jge = getJobEntry( i );
  jge.getEntry().setRepository( repository );
  retval.append( jge.getXML() );
// NOTE(review): discontinuous excerpt of Spoon's newJobFile(); interior lines
// were elided. In particular, the bare conditional expression below is not a
// valid Java statement on its own — presumably its assignment target
// (e.g. "sharedObjects = ...") was lost in extraction; verify against the
// full source.
public void newJobFile() {
  try {
    JobMeta jobMeta = new JobMeta();
    // Spoon observes the job for change notifications.
    jobMeta.addObserver( this );
    jobMeta.setRepository( rep );
    jobMeta.setMetaStore( metaStore );
    // Read shared objects from the repository when connected, else locally.
    rep != null ? rep.readJobMetaSharedObjects( jobMeta ) : jobMeta.readSharedObjects();
    sharedObjectsFileMap.put( sharedObjects.getFilename(), sharedObjects );
    if ( rep == null ) {
      jobMeta.setSharedObjects( sharedObjects );
    jobMeta.importFromMetaStore();
  } catch ( Exception e ) {
    new ErrorDialog(
  jobMeta.setRepositoryDirectory( getDefaultSaveLocation( jobMeta ) );
  // Assign a unique default name ("Job 1", "Job 2", ...).
  jobMeta.setName( STRING_JOB + " " + nr );
  jobMeta.setName( STRING_JOB + " " + nr ); // rename
  jobMeta.clearChanged();
@Override public JobMeta loadJob( String jobname, RepositoryDirectoryInterface repdir, ProgressMonitorListener monitor, String versionName ) throws KettleException { // This is a standard load of a transformation serialized in XML... // String filename = calcDirectoryName( repdir ) + jobname + EXT_JOB; JobMeta jobMeta = new JobMeta( filename, this ); jobMeta.setFilename( null ); jobMeta.setName( jobname ); jobMeta.setObjectId( new StringObjectId( calcObjectId( repdir, jobname, EXT_JOB ) ) ); jobMeta.setRepository( this ); jobMeta.setMetaStore( getMetaStore() ); readDatabases( jobMeta, true ); jobMeta.clearChanged(); return jobMeta; }
// NOTE(review): discontinuous excerpt of a job delegate's node-to-JobMeta
// deserialization; interior lines and braces were elided from this chunk.
jobMeta.setPrivateDatabases( privateDatabases );
jobMeta.setSharedObjectsFile( getString( rootNode, PROP_SHARED_FILE ) );
jobMeta.getJobCopies().add( copy );
// Sanity check: the number of deserialized entry copies must match the count
// stored alongside them.
if ( jobMeta.getJobCopies().size() != nrCopies ) {
  throw new KettleException( "The number of job entry copies read [" + jobMeta.getJobCopies().size() + "] was not the number we expected [" + nrCopies + "]" );
// Notes are stored as embedded XML snippets.
String xml = getString( noteNode, PROP_XML );
jobMeta .addNote( new NotePadMeta( XMLHandler.getSubNode( XMLHandler.loadXMLString( xml ), NotePadMeta.XML_TAG ) ) );
if ( jobMeta.nrNotes() != nrNotes ) {
  throw new KettleException( "The number of notes read [" + jobMeta.nrNotes() + "] was not the number we expected [" + nrNotes + "]" );
// Hops are reconnected by looking up their endpoint entry copies by
// name and copy number.
JobEntryCopy copyFrom = jobMeta.findJobEntry( copyFromName, copyFromNr, true );
JobEntryCopy copyTo = jobMeta.findJobEntry( copyToName, copyToNr, true );
jobHopMeta.setEvaluation( evaluation );
jobHopMeta.setUnconditional( unconditional );
jobMeta.addJobHop( jobHopMeta );
if ( jobMeta.nrJobHops() != nrHops ) {
  throw new KettleException( "The number of hops read [" + jobMeta.nrJobHops() + "] was not the number we expected [" + nrHops + "]" );
// NOTE(review): discontinuous excerpt of a job delegate's JobMeta-to-node
// serialization; interior lines and braces were elided from this chunk.
// Private database names are stored joined into a single delimited property.
if ( jobMeta.getPrivateDatabases() != null ) {
  String privateDatabaseNames = StringUtils.join( jobMeta.getPrivateDatabases(), JOB_PRIVATE_DATABASE_DELIMITER );
  DataNode privateDatabaseNode = rootNode.addNode( NODE_JOB_PRIVATE_DATABASES );
  privateDatabaseNode.setProperty( PROP_JOB_PRIVATE_DATABASE_NAMES, privateDatabaseNames );
// Each note is saved as its XML under an indexed child node.
notesNode.setProperty( PROP_NR_NOTES, jobMeta.nrNotes() );
for ( int i = 0; i < jobMeta.nrNotes(); i++ ) {
  NotePadMeta note = jobMeta.getNote( i );
  DataNode noteNode = notesNode.addNode( NOTE_PREFIX + i );
  noteNode.setProperty( PROP_XML, note.getXML() );
log.logDetailed( toString(), "Saving " + jobMeta.nrJobEntries() + " Job entry copies to repository..." ); //$NON-NLS-1$ //$NON-NLS-2$
entriesNode.setProperty( PROP_NR_JOB_ENTRY_COPIES, jobMeta.nrJobEntries() );
for ( int i = 0; i < jobMeta.nrJobEntries(); i++ ) {
  JobEntryCopy copy = jobMeta.getJobEntry( i );
  JobEntryInterface entry = copy.getEntry();
// Hops are saved under indexed child nodes as well.
hopsNode.setProperty( PROP_NR_HOPS, jobMeta.nrJobHops() );
for ( int i = 0; i < jobMeta.nrJobHops(); i++ ) {
  JobHopMeta hop = jobMeta.getJobHop( i );
  DataNode hopNode = hopsNode.addNode( JOB_HOP_PREFIX + i );
// Parameters: guard against a null list when writing the count.
String[] paramKeys = jobMeta.listParameters();
DataNode paramsNode = rootNode.addNode( NODE_PARAMETERS );
paramsNode.setProperty( PROP_NR_PARAMETERS, paramKeys == null ? 0 : paramKeys.length );
// NOTE(review): discontinuous excerpt of job import/copy logic; interior
// lines and braces were elided from this chunk.
// Strip object ids so the job and its connections are saved as new objects.
for ( int i = 0; i < jobMeta.nrDatabases(); i++ ) {
  jobMeta.getDatabase( i ).setObjectId( null );
jobMeta.setObjectId( null );
// Remember the old connections, then reset before re-reading shared objects.
List<DatabaseMeta> oldDatabases = jobMeta.getDatabases();
jobMeta.setDatabases( new ArrayList<DatabaseMeta>() );
jobMeta.setSlaveServers( new ArrayList<SlaveServer>() );
// Presumably assigned to a local (e.g. "sharedObjects = ...") in the full
// source — a bare conditional expression is not a valid statement; verify.
repository != null ? repository.readJobMetaSharedObjects( jobMeta ) : jobMeta.readSharedObjects();
sharedObjectsFileMap.put( sharedObjects.getFilename(), sharedObjects );
DatabaseMeta newDatabase = DatabaseMeta.findDatabase( jobMeta.getDatabases(), oldDatabase.getName() );
jobMeta.addDatabase( oldDatabase );
// Re-anchor the job in the repository's directory tree, falling back to the
// tree root when the original directory cannot be found.
repository.findDirectory( jobMeta.getRepositoryDirectory().getPath() );
if ( rdi != null && !rdi.getPath().equals( "/" ) ) {
  jobMeta.setRepositoryDirectory( rdi );
} else {
  jobMeta.setRepositoryDirectory( repository.loadRepositoryDirectoryTree() );
// NOTE(review): excerpt — loads a job from a file (no repository) and walks
// its declared parameters; "parameterValue" is defined on an elided line.
JobMeta jobMeta = new JobMeta( filename, null );
String[] declaredParameters = jobMeta.listParameters();
for ( int i = 0; i < declaredParameters.length; i++ ) {
  String parameterName = declaredParameters[i];
  String description = jobMeta.getParameterDescription( parameterName );
  String defaultValue = jobMeta.getParameterDefault( parameterName );
  jobMeta.setParameterValue( parameterName, parameterValue );
/**
 * Collects the resources this entry depends on: the superclass's dependencies
 * plus, when a server name is configured, that server (after variable
 * substitution) registered as a SERVER resource.
 *
 * @param jobMeta job used for variable substitution
 * @return the (possibly extended) list of resource references
 */
public List<ResourceReference> getResourceDependencies( JobMeta jobMeta ) {
  List<ResourceReference> references = super.getResourceDependencies( jobMeta );
  if ( Utils.isEmpty( serverName ) ) {
    return references;
  }
  String resolvedServer = jobMeta.environmentSubstitute( serverName );
  ResourceReference serverReference = new ResourceReference( this );
  serverReference.getEntries().add( new ResourceEntry( resolvedServer, ResourceType.SERVER ) );
  references.add( serverReference );
  return references;
}
@Test
public void testCurrentDirJob() throws Exception {
  final String directoryName = "dirName";
  final String name = "job";
  // Save a job into a freshly created repository directory...
  JobMeta saved = new JobMeta();
  saved.setName( name );
  RepositoryDirectoryInterface dir = repository.createRepositoryDirectory( new RepositoryDirectory(), directoryName );
  saved.setRepositoryDirectory( dir );
  repository.save( saved, "" );
  // ...load it back and check that repository wiring was restored.
  JobMeta loaded = repository.loadJob( name, dir, null, "" );
  assertEquals( repository, loaded.getRepository() );
  assertEquals( dir.getPath(), loaded.getRepositoryDirectory().getPath() );
  // The entry "current directory" variable must reflect the repo directory.
  loaded.setInternalKettleVariables();
  String currentDir = loaded.getVariable( Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY );
  assertEquals( dir.getPath(), currentDir );
}
/**
 * Builds a second JobMeta carrying the given identity fields and compares it
 * against the test fixture's jobMeta.
 *
 * @return the result of jobMeta.equals(...) on the constructed instance
 */
private boolean testEquals( String name, RepositoryDirectoryInterface repDirectory, ObjectRevision revision,
    String filename ) {
  JobMeta other = new JobMeta();
  other.setName( name );
  other.setRepositoryDirectory( repDirectory );
  other.setObjectRevision( revision );
  other.setFilename( filename );
  return jobMeta.equals( other );
}
// NOTE(review): discontinuous excerpt of JobMeta's resource-export logic;
// interior lines and braces were elided from this chunk.
String fullname;
String extension = "kjb";
if ( Utils.isEmpty( getFilename() ) ) {
  // Repository-based job: derive the export name from the repo directory path
  // and the job name, taking care not to double the directory separator.
  baseName = getName();
  fullname = directory.getPath() + ( directory.getPath().endsWith( RepositoryDirectory.DIRECTORY_SEPARATOR ) ? "" : RepositoryDirectory.DIRECTORY_SEPARATOR ) + getName() + "." + extension; //
} else {
  // File-based job: resolve the (variable-substituted) filename via VFS.
  FileObject fileObject = KettleVFS.getFileObject( space.environmentSubstitute( getFilename() ), space );
  originalPath = fileObject.getParent().getName().getPath();
  baseName = fileObject.getName().getBaseName();
// Export works on a clone so the live job is not modified.
JobMeta jobMeta = (JobMeta) this.realClone( false );
jobMeta.setRepositoryDirectory( directory );
compatibleJobEntryExportResources( jobEntry.getEntry(), jobMeta, definitions, namingInterface, repository );
jobEntry.getEntry().exportResources( jobMeta, definitions, namingInterface, repository, metaStore );
// Turn every discovered data directory into a named parameter on the clone.
for ( String directory : directoryMap.keySet() ) {
  String parameterName = directoryMap.get( directory );
  jobMeta.addParameterDefinition( parameterName, directory, "Data file path discovered during export" );
String jobMetaContent = jobMeta.getXML();
if ( Utils.isEmpty( this.getFilename() ) ) { // Repository
  definition.setOrigin( fullname );
@Test
public void testSetInternalEntryCurrentDirectoryWithFilename( ) {
  // With a filename set, the entry current-directory variable must follow the
  // filename directory, not the repository directory.
  JobMeta meta = new JobMeta( );
  meta.setFilename( "hasFilename" );
  meta.setVariable( Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY, "Original value defined at run execution" );
  meta.setVariable( Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY, "file:///C:/SomeFilenameDirectory" );
  meta.setVariable( Const.INTERNAL_VARIABLE_JOB_REPOSITORY_DIRECTORY, "/SomeRepDirectory" );
  meta.setInternalEntryCurrentDirectory();
  assertEquals( "file:///C:/SomeFilenameDirectory",
      meta.getVariable( Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY ) );
}
@Test
public void testUpdateCurrentDirWithFilename( ) {
  // updateCurrentDir() must prefer the filename directory over the repository
  // directory when the job has a filename.
  JobMeta meta = new JobMeta( );
  meta.setFilename( "hasFilename" );
  meta.setVariable( Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY, "Original value defined at run execution" );
  meta.setVariable( Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY, "file:///C:/SomeFilenameDirectory" );
  meta.setVariable( Const.INTERNAL_VARIABLE_JOB_REPOSITORY_DIRECTORY, "/SomeRepDirectory" );
  meta.updateCurrentDir();
  assertEquals( "file:///C:/SomeFilenameDirectory",
      meta.getVariable( Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY ) );
}