@Test
public void filesWithNoPath_AreNotProcessed_ArgsOfCurrentJob() throws Exception {
  // Arguments that are empty or whitespace-only must never reach processFile().
  jobEntry.setArguments( new String[] { Const.EMPTY_STRING, STRING_SPACES_ONLY } );
  jobEntry.setFilemasks( new String[] { null, null } );
  jobEntry.setArgFromPrevious( false );

  jobEntry.execute( new Result(), 0 );

  verify( jobEntry, never() ).processFile( anyString(), anyString(), any( Job.class ) );
}
/**
 * Deep-copies this job entry: the parallel {@code arguments} and
 * {@code filemasks} arrays are duplicated so the clone shares no
 * mutable array state with the original.
 */
public Object clone() {
  JobEntryDeleteFiles cloned = (JobEntryDeleteFiles) super.clone();
  if ( arguments != null ) {
    int fieldCount = arguments.length;
    // allocate() sizes both target arrays before the element copies.
    cloned.allocate( fieldCount );
    System.arraycopy( arguments, 0, cloned.arguments, 0, fieldCount );
    System.arraycopy( filemasks, 0, cloned.filemasks, 0, fieldCount );
  }
  return cloned;
}
/**
 * Copy information from the meta-data input to the dialog fields.
 */
public void getData() {
  if ( jobEntry.getName() != null ) {
    wName.setText( jobEntry.getName() );
  }

  String[] paths = jobEntry.getArguments();
  String[] masks = jobEntry.getFilemasks();
  if ( paths != null ) {
    // Populate one table row per path/mask pair; nulls leave the cell untouched.
    for ( int row = 0; row < paths.length; row++ ) {
      TableItem item = wFields.table.getItem( row );
      if ( paths[row] != null ) {
        item.setText( 1, paths[row] );
      }
      if ( masks[row] != null ) {
        item.setText( 2, masks[row] );
      }
    }
    wFields.setRowNums();
    wFields.optWidth( true );
  }

  wPrevious.setSelection( jobEntry.isArgFromPrevious() );
  wIncludeSubfolders.setSelection( jobEntry.isIncludeSubfolders() );

  wName.selectAll();
  wName.setFocus();
}
/**
 * Builds the dialog for a "Delete files" job entry. A freshly created
 * entry (no name yet) receives the localized default name.
 */
public JobEntryDeleteFilesDialog( Shell parent, JobEntryInterface jobEntryInt, Repository rep, JobMeta jobMeta ) {
  super( parent, jobEntryInt, rep, jobMeta );
  jobEntry = (JobEntryDeleteFiles) jobEntryInt;

  if ( this.jobEntry.getName() == null ) {
    this.jobEntry.setName( BaseMessages.getString( PKG, "JobDeleteFiles.Name.Default" ) );
  }
}
// NOTE(review): fragment of the dialog's OK handler — the leading `return;` closes a
// validation guard whose condition is outside this view; the remaining statements copy
// the widget state (name, include-subfolders, previous-args, masks, arguments) back
// into the job entry meta. Definition is cut at both edges — left byte-identical.
return; jobEntry.setName( wName.getText() ); jobEntry.setIncludeSubfolders( wIncludeSubfolders.getSelection() ); jobEntry.setPrevious( wPrevious.getSelection() ); jobEntry.setFilemasks( fileMasks ); jobEntry.setArguments( arguments );
@Before
public void setUp() throws Exception {
  jobEntry = new JobEntryDeleteFiles();

  // Parent job that never reports a stop request, so execute() runs to completion.
  Job parentJob = mock( Job.class );
  doReturn( false ).when( parentJob ).isStopped();
  jobEntry.setParentJob( parentJob );

  JobMeta mockJobMeta = mock( JobMeta.class );
  mockNamedClusterEmbedManager = mock( NamedClusterEmbedManager.class );
  when( mockJobMeta.getNamedClusterEmbedManager() ).thenReturn( mockNamedClusterEmbedManager );
  jobEntry.setParentJobMeta( mockJobMeta );

  // Spy the entry and stub processFile() so tests never touch the file system.
  jobEntry = spy( jobEntry );
  doReturn( true ).when( jobEntry ).processFile( anyString(), anyString(), eq( parentJob ) );
}
// NOTE(review): fragment of execute() — logs how many rows the previous entry produced,
// builds a path->mask multimap via populateDataForJobExecution(), substitutes variables
// in each path/mask, skips blank paths, and increments the error counter when
// processFile() fails. The surrounding loop/branch structure is cut at both edges —
// left byte-identical.
logDetailed( BaseMessages.getString( PKG, "JobEntryDeleteFiles.FoundPreviousRows", String .valueOf( ( resultRows != null ? resultRows.size() : 0 ) ) ) ); Multimap<String, String> pathToMaskMap = populateDataForJobExecution( resultRows ); final String filePath = environmentSubstitute( pathToMask.getKey() ); if ( filePath.trim().isEmpty() ) { logDetailed( BaseMessages.getString( PKG, "JobEntryDeleteFiles.NoPathProvided" ) ); final String fileMask = environmentSubstitute( pathToMask.getValue() ); if ( !processFile( filePath, fileMask, parentJob ) ) { numberOfErrFiles++;
@Test
public void filesPath_AreProcessed_ArgsOfPreviousMeta() throws Exception {
  jobEntry.setArgFromPrevious( true );

  // Feed one valid path through the previous entry's result rows.
  Result prevMetaResult = new Result();
  List<RowMetaAndData> rows = new ArrayList<>();
  rows.add( constructRowMetaAndData( PATH_TO_FILE, null ) );
  prevMetaResult.setRows( rows );

  jobEntry.execute( prevMetaResult, 0 );

  verify( jobEntry, times( rows.size() ) ).processFile( anyString(), anyString(), any( Job.class ) );
}
// NOTE(review): fragment of the dialog's widget construction — creates the "copy
// previous results" checkbox and sizes the fields table from the entry's arguments;
// the table and its label are enabled only when args do NOT come from the previous
// entry. Definition is cut at both edges — left byte-identical.
changed = jobEntry.hasChanged(); wPrevious = new Button( wSettings, SWT.CHECK ); props.setLook( wPrevious ); wPrevious.setSelection( jobEntry.isArgFromPrevious() ); wPrevious.setToolTipText( BaseMessages.getString( PKG, "JobDeleteFiles.Previous.Tooltip" ) ); fdPrevious = new FormData(); wlFields.setLayoutData( fdlFields ); String[] jobArgs = jobEntry.getArguments(); final int fieldsRows = ( jobArgs == null ) ? 1 : jobArgs.length; wFields.setLayoutData( fdFields ); wlFields.setEnabled( !jobEntry.isArgFromPrevious() ); wFields.setEnabled( !jobEntry.isArgFromPrevious() );
@Test
public void filesWithNoPath_AreNotProcessed_ArgsOfPreviousMeta() throws Exception {
  jobEntry.setArgFromPrevious( true );

  // Rows carrying only blank paths must be ignored.
  Result prevMetaResult = new Result();
  List<RowMetaAndData> rows = new ArrayList<>();
  rows.add( constructRowMetaAndData( Const.EMPTY_STRING, null ) );
  rows.add( constructRowMetaAndData( STRING_SPACES_ONLY, null ) );
  prevMetaResult.setRows( rows );

  jobEntry.execute( prevMetaResult, 0 );

  verify( jobEntry, never() ).processFile( anyString(), anyString(), any( Job.class ) );
}
@Test
public void filesPathVariables_AreProcessed_OnlyIfValueIsNotBlank() throws Exception {
  // Two variables: one resolving to a blank path, one to a real path.
  final String blankValueVariable = "pathToFileBlankValue";
  final String validValueVariable = "pathToFileValidValue";
  jobEntry.setVariable( blankValueVariable, Const.EMPTY_STRING );
  jobEntry.setVariable( validValueVariable, PATH_TO_FILE );

  jobEntry.setArguments( new String[] { asVariable( blankValueVariable ), asVariable( validValueVariable ) } );
  jobEntry.setFilemasks( new String[] { null, null } );
  jobEntry.setArgFromPrevious( false );

  jobEntry.execute( new Result(), 0 );

  // Only the variable with a non-blank value is processed.
  verify( jobEntry ).processFile( eq( PATH_TO_FILE ), anyString(), any( Job.class ) );
}
/**
 * Restores this entry's settings from the repository: the two boolean flags
 * plus the parallel name/filemask attribute lists.
 *
 * @throws KettleException if any repository attribute cannot be read
 */
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    argFromPrevious = rep.getJobEntryAttributeBoolean( id_jobentry, "arg_from_previous" );
    includeSubfolders = rep.getJobEntryAttributeBoolean( id_jobentry, "include_subfolders" );

    // The "name" attribute count defines how many path/mask pairs were saved.
    int attributeCount = rep.countNrJobEntryAttributes( id_jobentry, "name" );
    allocate( attributeCount );
    for ( int idx = 0; idx < attributeCount; idx++ ) {
      arguments[idx] = rep.getJobEntryAttributeString( id_jobentry, idx, "name" );
      filemasks[idx] = rep.getJobEntryAttributeString( id_jobentry, idx, "filemask" );
    }
  } catch ( KettleException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobEntryDeleteFiles.UnableToLoadFromRepo", String
      .valueOf( id_jobentry ) ), dbe );
  }
}
@Test
public void specifyingTheSamePath_WithDifferentWildcards() throws Exception {
  // Same directory listed twice, each with its own wildcard mask.
  final String fileExtensionTxt = ".txt";
  final String fileExtensionXml = ".xml";
  String[] args = new String[] { PATH_TO_FILE, PATH_TO_FILE };
  jobEntry.setArguments( args );
  jobEntry.setFilemasks( new String[] { fileExtensionTxt, fileExtensionXml } );
  jobEntry.setArgFromPrevious( false );

  jobEntry.execute( new Result(), 0 );

  // Each path+mask combination must be processed exactly once.
  verify( jobEntry ).processFile( eq( PATH_TO_FILE ), eq( fileExtensionTxt ), any( Job.class ) );
  verify( jobEntry ).processFile( eq( PATH_TO_FILE ), eq( fileExtensionXml ), any( Job.class ) );
}
/**
 * Restores this entry's settings from its XML node: the two boolean flags
 * plus the name/filemask pairs stored under {@code fields/field}.
 *
 * @throws KettleXMLException if the node cannot be parsed
 */
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers, Repository rep,
  IMetaStore metaStore ) throws KettleXMLException {
  try {
    super.loadXML( entrynode, databases, slaveServers );
    argFromPrevious = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "arg_from_previous" ) );
    includeSubfolders = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "include_subfolders" ) );

    Node fields = XMLHandler.getSubNode( entrynode, "fields" );
    int fieldCount = XMLHandler.countNodes( fields, "field" );
    allocate( fieldCount );
    for ( int idx = 0; idx < fieldCount; idx++ ) {
      Node fieldNode = XMLHandler.getSubNodeByNr( fields, "field", idx );
      arguments[idx] = XMLHandler.getTagValue( fieldNode, "name" );
      filemasks[idx] = XMLHandler.getTagValue( fieldNode, "filemask" );
    }
  } catch ( KettleXMLException xe ) {
    throw new KettleXMLException( BaseMessages.getString( PKG, "JobEntryDeleteFiles.UnableToLoadFromXml" ), xe );
  }
}
@Test
public void filesWithPath_AreProcessed_ArgsOfCurrentJob() throws Exception {
  String[] args = new String[] { PATH_TO_FILE };
  jobEntry.setArguments( args );
  // One mask per argument — the parallel arrays must have equal length
  // (the original test supplied a stray second mask element).
  jobEntry.setFilemasks( new String[] { null } );
  jobEntry.setArgFromPrevious( false );

  jobEntry.execute( new Result(), 0 );

  verify( jobEntry, times( args.length ) ).processFile( anyString(), anyString(), any( Job.class ) );
  // The embedded metastore key must be propagated for cluster-aware VFS access.
  verify( mockNamedClusterEmbedManager ).passEmbeddedMetastoreKey( anyObject(), anyString() );
}