congrats Icon
New! Announcing Tabnine Chat Beta
Learn More
Tabnine Logo
JobMeta
Code IndexAdd Tabnine to your IDE (free)

How to use
JobMeta
in
org.pentaho.di.job

Best Java code snippets using org.pentaho.di.job.JobMeta (Showing top 20 results out of 315)

origin: pentaho/pentaho-kettle

/**
 * Assembles a JobMeta from a repository file: copies the file's metadata onto a
 * fresh instance, wires it to this repository, then delegates deserialization of
 * the job content to the job delegate.
 */
private JobMeta buildJobMeta( final RepositoryFile file, final RepositoryDirectoryInterface parentDir,
               final NodeRepositoryFileData data, final ObjectRevision revision )
 throws KettleException {
 final JobMeta meta = new JobMeta();
 // Identity and descriptive metadata come straight from the repository file.
 meta.setName( file.getTitle() );
 meta.setFilename( file.getName() );
 meta.setDescription( file.getDescription() );
 meta.setObjectId( new StringObjectId( file.getId().toString() ) );
 meta.setObjectRevision( revision );
 // Attach repository context so downstream code can resolve shared resources.
 meta.setRepository( this );
 meta.setRepositoryDirectory( parentDir );
 meta.setMetaStore( getMetaStore() );
 readJobMetaSharedObjects( meta ); // expected to be served from the local cache
 // Populate the actual job definition (entries, hops, notes) from the data node.
 jobDelegate.dataNodeToElement( data.getNode(), meta );
 meta.clearChanged();
 return meta;
}
origin: pentaho/pentaho-kettle

wJobname.setText( Const.NVL( jobMeta.getName(), "" ) );
wJobFilename.setText( Const.NVL( jobMeta.getFilename(), "" ) );
wJobdescription.setText( Const.NVL( jobMeta.getDescription(), "" ) );
wExtendeddescription.setText( Const.NVL( jobMeta.getExtendedDescription(), "" ) );
wJobversion.setText( Const.NVL( jobMeta.getJobversion(), "" ) );
wJobstatus.select( jobMeta.getJobstatus() - 1 );
if ( jobMeta.getRepositoryDirectory() != null ) {
 wDirectory.setText( jobMeta.getRepositoryDirectory().getPath() );
if ( jobMeta.getCreatedUser() != null ) {
 wCreateUser.setText( jobMeta.getCreatedUser() );
if ( jobMeta.getCreatedDate() != null && jobMeta.getCreatedDate() != null ) {
 wCreateDate.setText( jobMeta.getCreatedDate().toString() );
if ( jobMeta.getModifiedUser() != null ) {
 wModUser.setText( jobMeta.getModifiedUser() );
if ( jobMeta.getModifiedDate() != null && jobMeta.getModifiedDate() != null ) {
 wModDate.setText( jobMeta.getModifiedDate().toString() );
wBatchTrans.setSelection( jobMeta.isBatchIdPassed() );
String[] parameters = jobMeta.listParameters();
for ( int idx = 0; idx < parameters.length; idx++ ) {
 TableItem item = wParamFields.table.getItem( idx );
  description = jobMeta.getParameterDescription( parameters[idx] );
origin: pentaho/pentaho-kettle

 /**
  * Opens the edit dialog for the currently selected database connection.
  * Editing happens on a copy so that cancelling (or invalid input) leaves the
  * original connection untouched; on success the copy replaces the original.
  */
 public void widgetSelected( SelectionEvent e ) {
  DatabaseMeta existing = jobMeta.findDatabase( wConnection.getText() );
  if ( existing == null ) {
   return;
  }
  // Edit a copy to avoid spoiling data on cancel or incorrect input.
  DatabaseMeta copy = (DatabaseMeta) existing.clone();
  String newConnectionName = showDbDialogUnlessCancelledOrValid( copy, existing );
  if ( newConnectionName != null ) {
   // Swap the edited copy in for the old connection.
   jobMeta.removeDatabase( jobMeta.indexOfDatabase( existing ) );
   jobMeta.addDatabase( copy );
   reinitConnectionDropDown( wConnection, newConnectionName );
  }
 }
}
origin: pentaho/pentaho-kettle

/**
 * Tells whether this job is referenced through the file system rather than
 * through a repository.
 *
 * @return true if this is a file reference (i.e. not a repository reference)
 */
public boolean isFileReference() {
 boolean repositoryBased = isRepReference( getFilename(), this.getName() );
 return !repositoryBased;
}
origin: pentaho/pentaho-kettle

/**
 * Persists every declared parameter of the given job to the repository.
 *
 * @param jobMeta
 *          the job whose parameters are saved.
 *
 * @throws KettleException
 *           Upon any error.
 */
private void saveJobParameters( JobMeta jobMeta ) throws KettleException {
 String[] keys = jobMeta.listParameters();
 for ( int i = 0; i < keys.length; i++ ) {
  String key = keys[i];
  String description = jobMeta.getParameterDescription( key );
  String defaultValue = jobMeta.getParameterDefault( key );
  // One repository row per parameter, keyed by the job's object id and position.
  insertJobParameter( jobMeta.getObjectId(), i, key, defaultValue, description );
 }
}
origin: pentaho/pentaho-kettle

JobMeta jobMeta = new JobMeta();
synchronized ( repository ) {
 try {
  jobMeta.clear();
  jobMeta.setRepositoryDirectory( repdir );
  jobMeta.setObjectId( getJobID( jobname, repdir.getObjectId() ) );
  if ( jobMeta.getObjectId() != null ) {
   ObjectId[] noteids = repository.getJobNoteIDs( jobMeta.getObjectId() );
   ObjectId[] jecids = repository.getJobEntryCopyIDs( jobMeta.getObjectId() );
   ObjectId[] hopid = repository.getJobHopIDs( jobMeta.getObjectId() );
   RowMetaAndData jobRow = getJob( jobMeta.getObjectId() );
   jobMeta.setName( jobRow.getString( KettleDatabaseRepository.FIELD_JOB_NAME, null ) );
   jobMeta.setDescription( jobRow.getString( KettleDatabaseRepository.FIELD_JOB_DESCRIPTION, null ) );
   jobMeta.setExtendedDescription( jobRow.getString(
    KettleDatabaseRepository.FIELD_JOB_EXTENDED_DESCRIPTION, null ) );
   jobMeta.setJobversion( jobRow.getString( KettleDatabaseRepository.FIELD_JOB_JOB_VERSION, null ) );
   jobMeta.setJobstatus( Const.toInt( jobRow
    .getString( KettleDatabaseRepository.FIELD_JOB_JOB_STATUS, null ), -1 ) );
   jobMeta.setCreatedUser( jobRow.getString( KettleDatabaseRepository.FIELD_JOB_CREATED_USER, null ) );
   jobMeta.setCreatedDate( jobRow.getDate( KettleDatabaseRepository.FIELD_JOB_CREATED_DATE, new Date() ) );
origin: pentaho/pentaho-kettle

clear();
 setFilename( fname );
}  else {
 setRepository( rep );
setName( XMLHandler.getTagValue( jobnode, "name" ) );
 sharedObjectsFile = XMLHandler.getTagValue( jobnode, "shared_objects_file" );
 if ( rep == null || ignoreRepositorySharedObjects ) {
  sharedObjects = readSharedObjects();
 } else {
  sharedObjects = rep.readJobMetaSharedObjects( this );
importFromMetaStore();
 addParameterDefinition( paramName, defValue, descr );
 DatabaseMeta exist = findDatabase( dbcon.getName() );
 if ( exist == null ) {
  addDatabase( dbcon );
 } else {
  if ( !exist.isShared() ) {
   if ( shouldOverwrite( prompter, props,
     BaseMessages.getString( PKG, "JobMeta.Dialog.ConnectionExistsOverWrite.Message", dbcon.getName() ),
     BaseMessages.getString( PKG, "JobMeta.Dialog.ConnectionExistsOverWrite.DontShowAnyMoreMessage" ) ) ) {
    int idx = indexOfDatabase( exist );
origin: pentaho/pentaho-kettle

getNamedClusterEmbedManager().clear();
retval.append( "  " ).append( XMLHandler.addTagValue( "name", getName() ) );
String[] parameters = listParameters();
for ( int idx = 0; idx < parameters.length; idx++ ) {
 retval.append( "      " ).append( XMLHandler.openTag( "parameter" ) ).append( Const.CR );
 try {
  retval.append( "        " )
    .append( XMLHandler.addTagValue( "default_value", getParameterDefault( parameters[idx] ) ) );
  retval.append( "        " )
    .append( XMLHandler.addTagValue( "description", getParameterDescription( parameters[idx] ) ) );
 } catch ( UnknownParamException e ) {
Set<DatabaseMeta> usedDatabaseMetas = getUsedDatabaseMetas();
for ( int i = 0; i < nrDatabases(); i++ ) {
 DatabaseMeta dbMeta = getDatabase( i );
 if ( props != null && props.areOnlyUsedConnectionsSavedToXML() ) {
  if ( usedDatabaseMetas.contains( dbMeta ) ) {
for ( LogTableInterface logTable : getLogTables() ) {
 retval.append( logTable.getXML() );
for ( int i = 0; i < nrJobEntries(); i++ ) {
 JobEntryCopy jge = getJobEntry( i );
 jge.getEntry().setRepository( repository );
 retval.append( jge.getXML() );
origin: pentaho/pentaho-kettle

public void newJobFile() {
 try {
  JobMeta jobMeta = new JobMeta();
  jobMeta.addObserver( this );
  jobMeta.setRepository( rep );
  jobMeta.setMetaStore( metaStore );
    rep != null ? rep.readJobMetaSharedObjects( jobMeta ) : jobMeta.readSharedObjects();
   sharedObjectsFileMap.put( sharedObjects.getFilename(), sharedObjects );
   if ( rep == null ) {
    jobMeta.setSharedObjects( sharedObjects );
   jobMeta.importFromMetaStore();
  } catch ( Exception e ) {
   new ErrorDialog(
  jobMeta.setRepositoryDirectory( getDefaultSaveLocation( jobMeta ) );
  jobMeta.setName( STRING_JOB + " " + nr );
   jobMeta.setName( STRING_JOB + " " + nr ); // rename
  jobMeta.clearChanged();
origin: pentaho/pentaho-kettle

/**
 * Loads a job from this repository by name and directory.
 * The job is stored as an XML-serialized .kjb file; after loading, the meta is
 * rewired to point at this repository.
 */
@Override
public JobMeta loadJob( String jobname, RepositoryDirectoryInterface repdir, ProgressMonitorListener monitor,
 String versionName ) throws KettleException {
 // This is a standard load of a transformation serialized in XML...
 //
 String filename = calcDirectoryName( repdir ) + jobname + EXT_JOB;
 JobMeta loaded = new JobMeta( filename, this );
 // Filename is cleared after loading — presumably so the job is addressed via
 // the repository rather than the file system; confirm against callers.
 loaded.setFilename( null );
 loaded.setName( jobname );
 loaded.setObjectId( new StringObjectId( calcObjectId( repdir, jobname, EXT_JOB ) ) );
 loaded.setRepository( this );
 loaded.setMetaStore( getMetaStore() );
 readDatabases( loaded, true );
 loaded.clearChanged();
 return loaded;
}
origin: pentaho/pentaho-kettle

jobMeta.setPrivateDatabases( privateDatabases );
jobMeta.setSharedObjectsFile( getString( rootNode, PROP_SHARED_FILE ) );
 jobMeta.getJobCopies().add( copy );
if ( jobMeta.getJobCopies().size() != nrCopies ) {
 throw new KettleException( "The number of job entry copies read [" + jobMeta.getJobCopies().size()
   + "] was not the number we expected [" + nrCopies + "]" );
 String xml = getString( noteNode, PROP_XML );
 jobMeta
   .addNote( new NotePadMeta( XMLHandler.getSubNode( XMLHandler.loadXMLString( xml ), NotePadMeta.XML_TAG ) ) );
if ( jobMeta.nrNotes() != nrNotes ) {
 throw new KettleException( "The number of notes read [" + jobMeta.nrNotes()
   + "] was not the number we expected [" + nrNotes + "]" );
 JobEntryCopy copyFrom = jobMeta.findJobEntry( copyFromName, copyFromNr, true );
 JobEntryCopy copyTo = jobMeta.findJobEntry( copyToName, copyToNr, true );
 jobHopMeta.setEvaluation( evaluation );
 jobHopMeta.setUnconditional( unconditional );
 jobMeta.addJobHop( jobHopMeta );
if ( jobMeta.nrJobHops() != nrHops ) {
 throw new KettleException( "The number of hops read [" + jobMeta.nrJobHops()
   + "] was not the number we expected [" + nrHops + "]" );
origin: pentaho/pentaho-kettle

if ( jobMeta.getPrivateDatabases() != null ) {
 String privateDatabaseNames = StringUtils.join( jobMeta.getPrivateDatabases(), JOB_PRIVATE_DATABASE_DELIMITER );
 DataNode privateDatabaseNode = rootNode.addNode( NODE_JOB_PRIVATE_DATABASES );
 privateDatabaseNode.setProperty( PROP_JOB_PRIVATE_DATABASE_NAMES, privateDatabaseNames );
notesNode.setProperty( PROP_NR_NOTES, jobMeta.nrNotes() );
for ( int i = 0; i < jobMeta.nrNotes(); i++ ) {
 NotePadMeta note = jobMeta.getNote( i );
 DataNode noteNode = notesNode.addNode( NOTE_PREFIX + i );
 noteNode.setProperty( PROP_XML, note.getXML() );
 log.logDetailed( toString(), "Saving " + jobMeta.nrJobEntries() + " Job entry copies to repository..." ); //$NON-NLS-1$ //$NON-NLS-2$
entriesNode.setProperty( PROP_NR_JOB_ENTRY_COPIES, jobMeta.nrJobEntries() );
for ( int i = 0; i < jobMeta.nrJobEntries(); i++ ) {
 JobEntryCopy copy = jobMeta.getJobEntry( i );
 JobEntryInterface entry = copy.getEntry();
hopsNode.setProperty( PROP_NR_HOPS, jobMeta.nrJobHops() );
for ( int i = 0; i < jobMeta.nrJobHops(); i++ ) {
 JobHopMeta hop = jobMeta.getJobHop( i );
 DataNode hopNode = hopsNode.addNode( JOB_HOP_PREFIX + i );
String[] paramKeys = jobMeta.listParameters();
DataNode paramsNode = rootNode.addNode( NODE_PARAMETERS );
paramsNode.setProperty( PROP_NR_PARAMETERS, paramKeys == null ? 0 : paramKeys.length );
origin: pentaho/pentaho-kettle

for ( int i = 0; i < jobMeta.nrDatabases(); i++ ) {
 jobMeta.getDatabase( i ).setObjectId( null );
jobMeta.setObjectId( null );
List<DatabaseMeta> oldDatabases = jobMeta.getDatabases();
jobMeta.setDatabases( new ArrayList<DatabaseMeta>() );
jobMeta.setSlaveServers( new ArrayList<SlaveServer>() );
  repository != null ? repository.readJobMetaSharedObjects( jobMeta ) : jobMeta.readSharedObjects();
 sharedObjectsFileMap.put( sharedObjects.getFilename(), sharedObjects );
 DatabaseMeta newDatabase = DatabaseMeta.findDatabase( jobMeta.getDatabases(), oldDatabase.getName() );
   jobMeta.addDatabase( oldDatabase );
   repository.findDirectory( jobMeta.getRepositoryDirectory().getPath() );
  if ( rdi != null && !rdi.getPath().equals( "/" ) ) {
   jobMeta.setRepositoryDirectory( rdi );
  } else {
   jobMeta.setRepositoryDirectory( repository.loadRepositoryDirectoryTree() );
origin: pentaho/pdi-sdk-plugins

JobMeta jobMeta = new JobMeta( filename, null );
String[] declaredParameters = jobMeta.listParameters();
for ( int i = 0; i < declaredParameters.length; i++ ) {
 String parameterName = declaredParameters[i];
 String description = jobMeta.getParameterDescription( parameterName );
 String defaultValue = jobMeta.getParameterDefault( parameterName );
 jobMeta.setParameterValue( parameterName, parameterValue );
origin: pentaho/pentaho-kettle

/**
 * Collects the resource dependencies of this job entry, adding a SERVER
 * resource for the (variable-substituted) server name when one is configured.
 *
 * @param jobMeta the job providing the variable space for substitution
 * @return the dependency list, including this entry's server reference if any
 */
public List<ResourceReference> getResourceDependencies( JobMeta jobMeta ) {
 List<ResourceReference> references = super.getResourceDependencies( jobMeta );
 if ( Utils.isEmpty( serverName ) ) {
  return references;
 }
 String resolvedServerName = jobMeta.environmentSubstitute( serverName );
 ResourceReference serverReference = new ResourceReference( this );
 serverReference.getEntries().add( new ResourceEntry( resolvedServerName, ResourceType.SERVER ) );
 references.add( serverReference );
 return references;
}
origin: pentaho/pentaho-kettle

/**
 * Verifies that a job saved to and reloaded from a repository directory exposes
 * that directory's path as the entry-current-directory internal variable.
 */
@Test
public void testCurrentDirJob() throws Exception {
 final String directoryName = "dirName";
 final String name = "job";

 // Save a minimal job into a fresh repository directory.
 JobMeta savedJob = new JobMeta();
 savedJob.setName( name );
 RepositoryDirectoryInterface directory =
   repository.createRepositoryDirectory( new RepositoryDirectory(), directoryName );
 savedJob.setRepositoryDirectory( directory );
 repository.save( savedJob, "" );

 // Reload and check the repository wiring survived the round trip.
 JobMeta loadedJob = repository.loadJob( name, directory, null, "" );
 assertEquals( repository, loadedJob.getRepository() );
 assertEquals( directory.getPath(), loadedJob.getRepositoryDirectory().getPath() );

 // The internal current-directory variable must resolve to the repo directory path.
 loadedJob.setInternalKettleVariables();
 String currentDir = loadedJob.getVariable( Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY );
 assertEquals( directory.getPath(), currentDir );
}
origin: pentaho/pentaho-kettle

/**
 * Builds a second JobMeta from the given attributes and compares it against the
 * test fixture's jobMeta via equals().
 *
 * @return true if the fixture's jobMeta equals the constructed instance
 */
private boolean testEquals( String name, RepositoryDirectoryInterface repDirectory, ObjectRevision revision,
  String filename ) {
 JobMeta candidate = new JobMeta();
 candidate.setName( name );
 candidate.setRepositoryDirectory( repDirectory );
 candidate.setObjectRevision( revision );
 candidate.setFilename( filename );
 return jobMeta.equals( candidate );
}
origin: pentaho/pentaho-kettle

String fullname;
String extension = "kjb";
if ( Utils.isEmpty( getFilename() ) ) {
 baseName = getName();
 fullname =
   directory.getPath() + ( directory.getPath().endsWith( RepositoryDirectory.DIRECTORY_SEPARATOR ) ? ""
     : RepositoryDirectory.DIRECTORY_SEPARATOR ) + getName() + "." + extension; //
} else {
 FileObject fileObject = KettleVFS.getFileObject( space.environmentSubstitute( getFilename() ), space );
 originalPath = fileObject.getParent().getName().getPath();
 baseName = fileObject.getName().getBaseName();
 JobMeta jobMeta = (JobMeta) this.realClone( false );
 jobMeta.setRepositoryDirectory( directory );
  compatibleJobEntryExportResources( jobEntry.getEntry(), jobMeta, definitions, namingInterface, repository );
  jobEntry.getEntry().exportResources( jobMeta, definitions, namingInterface, repository, metaStore );
  for ( String directory : directoryMap.keySet() ) {
   String parameterName = directoryMap.get( directory );
   jobMeta.addParameterDefinition( parameterName, directory, "Data file path discovered during export" );
 String jobMetaContent = jobMeta.getXML();
 if ( Utils.isEmpty( this.getFilename() ) ) { // Repository
  definition.setOrigin( fullname );
origin: pentaho/pentaho-kettle

/**
 * When a filename is set, setInternalEntryCurrentDirectory() must point the
 * entry-current-directory variable at the filename directory, not the
 * repository directory or any previously set value.
 */
@Test
public void testSetInternalEntryCurrentDirectoryWithFilename( ) {
 JobMeta meta = new JobMeta(  );
 meta.setFilename( "hasFilename" );
 // Seed all three candidate variables; only the filename directory should win.
 meta.setVariable( Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY, "Original value defined at run execution" );
 meta.setVariable( Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY, "file:///C:/SomeFilenameDirectory" );
 meta.setVariable( Const.INTERNAL_VARIABLE_JOB_REPOSITORY_DIRECTORY, "/SomeRepDirectory" );

 meta.setInternalEntryCurrentDirectory();

 assertEquals( "file:///C:/SomeFilenameDirectory", meta.getVariable( Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY ) );
}
origin: pentaho/pentaho-kettle

/**
 * When a filename is set, updateCurrentDir() must resolve the
 * entry-current-directory variable to the filename directory rather than the
 * repository directory or the pre-existing value.
 */
@Test
public void testUpdateCurrentDirWithFilename( ) {
 JobMeta meta = new JobMeta(  );
 meta.setFilename( "hasFilename" );
 // Seed all three candidate variables; only the filename directory should win.
 meta.setVariable( Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY, "Original value defined at run execution" );
 meta.setVariable( Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY, "file:///C:/SomeFilenameDirectory" );
 meta.setVariable( Const.INTERNAL_VARIABLE_JOB_REPOSITORY_DIRECTORY, "/SomeRepDirectory" );

 meta.updateCurrentDir();

 assertEquals( "file:///C:/SomeFilenameDirectory", meta.getVariable( Const.INTERNAL_VARIABLE_ENTRY_CURRENT_DIRECTORY ) );
}
org.pentaho.di.jobJobMeta

Javadoc

The definition of a PDI job is represented by a JobMeta object. It is typically loaded from a .kjb file, a PDI repository, or it is generated dynamically. The declared parameters of the job definition are then queried using listParameters() and assigned values using calls to setParameterValue(..). JobMeta provides methods to load, save, verify, etc.

Most used methods

  • environmentSubstitute
  • <init>
    Create a new JobMeta object by loading it from a DOM node.
  • getName
  • findDatabase
  • getDatabases
  • getJobCopies
    Gets the job copies.
  • getParameterDefault
  • getXML
  • setName
  • addDatabase
  • addJobEntry
    Adds the job entry.
  • addJobHop
    Adds the job hop.
  • addJobEntry,
  • addJobHop,
  • addNote,
  • addParameterDefinition,
  • clearChanged,
  • findJobEntry,
  • getCreatedDate,
  • getCreatedUser,
  • getDatabase,
  • getExtendedDescription

Popular in Java

  • Reactive rest calls using spring rest template
  • compareTo (BigDecimal)
  • getSystemService (Context)
  • scheduleAtFixedRate (ScheduledExecutorService)
  • InputStream (java.io)
    A readable source of bytes.Most clients will use input streams that read data from the file system (
  • Time (java.sql)
    Java representation of an SQL TIME value. Provides utilities to format and parse the time's represen
  • GregorianCalendar (java.util)
    GregorianCalendar is a concrete subclass of Calendarand provides the standard calendar used by most
  • HashMap (java.util)
    HashMap is an implementation of Map. All optional operations are supported.All elements are permitte
  • Scanner (java.util)
    A parser that parses a text string of primitive types and strings with the help of regular expressio
  • ReentrantLock (java.util.concurrent.locks)
    A reentrant mutual exclusion Lock with the same basic behavior and semantics as the implicit monitor
  • From CI to AI: The AI layer in your organization
Tabnine Logo
  • Products

    Search for Java codeSearch for JavaScript code
  • IDE Plugins

    IntelliJ IDEAWebStormVisual StudioAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimGoLandRubyMineEmacsJupyter NotebookJupyter LabRiderDataGripAppCode
  • Company

    About UsContact UsCareers
  • Resources

    FAQBlogTabnine AcademyTerms of usePrivacy policyJava Code IndexJavascript Code Index
Get Tabnine for your IDE now