/**
 * Get an array of the names of all the Values in the Row.
 *
 * @return an array of Strings: the names of all the Values in the Row; a value
 *         metadata without a name contributes an empty string, never null.
 */
@Override
public String[] getFieldNames() {
  lock.readLock().lock();
  try {
    // Cache size() once instead of re-evaluating it on every loop iteration —
    // consistent with getFieldNamesAndTypes(), which already does this.
    final int size = size();
    String[] retval = new String[ size ];
    for ( int i = 0; i < size; i++ ) {
      String valueName = getValueMeta( i ).getName();
      // Normalize missing names to "" so callers never see null entries.
      retval[ i ] = valueName == null ? "" : valueName;
    }
    return retval;
  } finally {
    lock.readLock().unlock();
  }
}
/**
 * Serialize a row of data as XML, delegating each field to its value metadata.
 *
 * @param rowData the row of data to serialize as XML
 * @return an XML representation of the row data
 * @throws IOException Thrown in case there is an (Base64/GZip) encoding problem
 */
@Override
public String getDataXML( Object[] rowData ) throws IOException {
  StringBuilder builder = new StringBuilder();
  builder.append( "<" ).append( XML_DATA_TAG ).append( ">" );
  lock.readLock().lock();
  try {
    final int fieldCount = size();
    for ( int index = 0; index < fieldCount; index++ ) {
      builder.append( getValueMeta( index ).getDataXML( rowData[ index ] ) );
    }
  } finally {
    lock.readLock().unlock();
  }
  builder.append( "</" ).append( XML_DATA_TAG ).append( ">" );
  return builder.toString();
}
/**
 * Get the string representation of the data in a row of data.
 * Each field is rendered as [value], separated by commas.
 *
 * @param row the row of data to convert to string
 * @return the row of data in string form
 * @throws KettleValueException in case of a conversion error
 */
@Override
public String getString( Object[] row ) throws KettleValueException {
  lock.readLock().lock();
  try {
    StringBuilder result = new StringBuilder();
    final int fieldCount = size();
    for ( int index = 0; index < fieldCount; index++ ) {
      if ( index > 0 ) {
        result.append( ", " );
      }
      result.append( "[" ).append( getString( row, index ) ).append( "]" );
    }
    return result.toString();
  } finally {
    lock.readLock().unlock();
  }
}
/**
 * Serialize the row metadata as XML, delegating each field to its value metadata.
 *
 * @return an XML representation of the row metadata
 * @throws IOException Thrown in case there is an (Base64/GZip) encoding problem
 */
@Override
public String getMetaXML() throws IOException {
  StringBuilder builder = new StringBuilder();
  builder.append( "<" ).append( XML_META_TAG ).append( ">" );
  lock.readLock().lock();
  try {
    final int fieldCount = size();
    for ( int index = 0; index < fieldCount; index++ ) {
      builder.append( getValueMeta( index ).getMetaXML() );
    }
  } finally {
    lock.readLock().unlock();
  }
  builder.append( "</" ).append( XML_META_TAG ).append( ">" );
  return builder.toString();
}
/** * Write ONLY the specified metadata to the outputStream * * @throws KettleFileException in case things go awry */ @Override public void writeMeta( DataOutputStream outputStream ) throws KettleFileException { lock.readLock().lock(); try { // First handle the number of fields in a row try { outputStream.writeInt( size() ); } catch ( IOException e ) { throw new KettleFileException( "Unable to write nr of metadata values", e ); } // Write all values in the row for ( int i = 0; i < size(); i++ ) { getValueMeta( i ).writeMeta( outputStream ); } } finally { lock.readLock().unlock(); } }
/**
 * Read one row of data from the input stream, decoding each field in order via
 * its value metadata.
 *
 * @param inputStream the stream to read serialized field data from
 * @return the deserialized row of data, one element per value metadata
 * @throws KettleFileException in case the data could not be read
 * @throws SocketTimeoutException in case the underlying socket read times out
 */
@Override
public Object[] readData( DataInputStream inputStream ) throws KettleFileException, SocketTimeoutException {
  lock.readLock().lock();
  try {
    Object[] data = new Object[ size() ];
    for ( int i = 0; i < size(); i++ ) {
      data[ i ] = getValueMeta( i ).readData( inputStream );
    }
    // For an empty row, a single boolean marker is consumed from the stream —
    // presumably written by the corresponding serializer to keep the stream
    // aligned and to allow EOF detection (TODO confirm against the writer side).
    if ( size() == 0 ) {
      try {
        inputStream.readBoolean();
      } catch ( EOFException e ) {
        // Translate end-of-stream into the Kettle-specific EOF exception.
        throw new KettleEOFException( e );
      } catch ( SocketTimeoutException e ) {
        // Propagate timeouts unchanged so callers can distinguish them.
        throw e;
      } catch ( IOException e ) {
        throw new KettleFileException( toString() + " : Unable to read the marker flag data from input stream", e );
      }
    }
    return data;
  } finally {
    lock.readLock().unlock();
  }
}
/**
 * Compare 2 rows with each other using all values in the rows and also considering the specified
 * ascending clauses of the value metadata.
 *
 * @param rowData1 The first row of data
 * @param rowData2 The second row of data
 * @return 0 if the rows are considered equal, -1 is data1 is smaller, 1 if data2 is smaller.
 * @throws KettleValueException in case of a data conversion error during comparison
 */
@Override
public int compare( Object[] rowData1, Object[] rowData2 ) throws KettleValueException {
  lock.readLock().lock();
  try {
    final int fieldCount = size();
    for ( int index = 0; index < fieldCount; index++ ) {
      // Delegate to the value metadata; the first non-equal field decides.
      int result = getValueMeta( index ).compare( rowData1[ index ], rowData2[ index ] );
      if ( result != 0 ) {
        return result;
      }
    }
    return 0;
  } finally {
    lock.readLock().unlock();
  }
}
/**
 * Calculate a hashCode of the content (not the index) of the data specified NOTE: This method uses a
 * simple XOR of the individual hashCodes which can result in a lot of collisions for similar types of
 * data (e.g. [A,B] == [B,A] and is not suitable for normal use. It is kept to provide backward
 * compatibility with CombinationLookup.lookupValues()
 *
 * @param rowData The data to calculate a hashCode with
 * @return the calculated hashCode
 * @throws KettleValueException in case there is a data conversion error
 * @deprecated
 */
@Override
@Deprecated
public int oldXORHashCode( Object[] rowData ) throws KettleValueException {
  lock.readLock().lock();
  try {
    int result = 0;
    final int fieldCount = size();
    for ( int index = 0; index < fieldCount; index++ ) {
      // XOR-fold each field's hash; order-insensitive by construction.
      result ^= getValueMeta( index ).hashCode( rowData[ index ] );
    }
    return result;
  } finally {
    lock.readLock().unlock();
  }
}
/**
 * Convert an XML node into binary data using the row metadata supplied.
 *
 * @param node The data row node
 * @return a row of data, converted from XML
 * @throws KettleException Thrown in case there is an (Base64/GZip) decoding problem
 */
@Override
public Object[] getRow( Node node ) throws KettleException {
  lock.readLock().lock();
  try {
    final int fieldCount = size();
    Object[] rowData = RowDataUtil.allocateRowData( fieldCount );
    for ( int index = 0; index < fieldCount; index++ ) {
      // Locate the index-th data child node and let the value metadata decode it.
      Node valueDataNode = XMLHandler.getSubNodeByNr( node, ValueMeta.XML_DATA_TAG, index );
      rowData[ index ] = getValueMeta( index ).getValue( valueDataNode );
    }
    return rowData;
  } finally {
    lock.readLock().unlock();
  }
}
// Map a sparse {column index -> optional value} input into a dense row array
// sized to the current row metadata; columns absent from the map stay null.
@Override
public Object[] apply( Map<Integer, Optional<Object>> input ) {
  Object[] row = new Object[rowMeta.size()];
  for ( Map.Entry<Integer, Optional<Object>> entry : input.entrySet() ) {
    // NOTE(review): orNull() indicates this is Guava's Optional, not java.util — confirm imports.
    row[entry.getKey()] = entry.getValue().orNull();
  }
  return row;
} } );
/**
 * Get an array of strings showing the name of the values in the row padded to a maximum length,
 * followed by the types of the values.
 *
 * @param maxlen The length to which the name will be padded.
 * @return an array of strings: the names and the types of the fieldnames in the row.
 */
@Override
public String[] getFieldNamesAndTypes( int maxlen ) {
  lock.readLock().lock();
  try {
    String[] result = new String[ size() ];
    for ( int index = 0; index < result.length; index++ ) {
      ValueMetaInterface valueMeta = getValueMeta( index );
      // Pad the name to a fixed width, then append the type description.
      result[ index ] = Const.rightPad( valueMeta.getName(), maxlen ) + " (" + valueMeta.getTypeDesc() + ")";
    }
    return result;
  } finally {
    lock.readLock().unlock();
  }
}
/**
 * Append a column: register the value metadata and store each supplied value at the
 * new column index, one per row.
 */
private void addColumn( ValueMetaInterface meta, Object... values ) {
  // The new column's index is the row metadata's width before the addition.
  final int columnIndex = rowMeta.size();
  rowMeta.addValueMeta( meta );
  for ( int rowIndex = 0; rowIndex < values.length; rowIndex++ ) {
    data.put( rowIndex, columnIndex, Optional.fromNullable( values[ rowIndex ] ) );
  }
}
// An existing job row (name field populated) must not be reported as deleted.
@Test
public void getObjectInformation_ExistingJob_IsDeletedFlagNotSet() throws Exception {
  KettleDatabaseRepositoryJobDelegate jobDelegate =
    spy( new KettleDatabaseRepositoryJobDelegate( repository ) );

  RowMeta meta = createMetaForJob();
  Object[] values = new Object[ meta.size() ];
  int nameIndex = Arrays.asList( meta.getFieldNames() ).indexOf( KettleDatabaseRepositoryBase.FIELD_JOB_NAME );
  values[ nameIndex ] = EXISTING_ID;

  doReturn( new RowMetaAndData( meta, values ) )
    .when( jobDelegate )
    .getJob( new StringObjectId( EXISTING_ID ) );

  assertIsDeletedNotSet_ForExistingObject( null, jobDelegate, RepositoryObjectType.JOB );
}
// An existing transformation row (name field populated) must not be reported as deleted.
@Test
public void getObjectInformation_ExistingTrans_IsDeletedFlagNotSet() throws Exception {
  KettleDatabaseRepositoryTransDelegate transDelegate =
    spy( new KettleDatabaseRepositoryTransDelegate( repository ) );

  // FIX: build the transformation metadata, not the job metadata. This test stubs
  // a transformation row and looks up FIELD_TRANSFORMATION_NAME, which the trans
  // meta defines; the parallel AbsentTrans test already uses createMetaForTrans().
  RowMeta meta = createMetaForTrans();
  Object[] values = new Object[ meta.size() ];
  int nameIndex =
    Arrays.asList( meta.getFieldNames() ).indexOf( KettleDatabaseRepositoryBase.FIELD_TRANSFORMATION_NAME );
  values[ nameIndex ] = EXISTING_ID;

  doReturn( new RowMetaAndData( meta, values ) )
    .when( transDelegate )
    .getTransformation( new StringObjectId( EXISTING_ID ) );

  assertIsDeletedNotSet_ForExistingObject( transDelegate, null, RepositoryObjectType.TRANSFORMATION );
}
// A job row with no data (all fields null) must be reported as deleted.
@Test
public void getObjectInformation_AbsentJob_IsDeletedFlagSet() throws Exception {
  KettleDatabaseRepositoryJobDelegate jobDelegate =
    spy( new KettleDatabaseRepositoryJobDelegate( repository ) );

  RowMeta meta = createMetaForJob();
  Object[] emptyValues = new Object[ meta.size() ];

  doReturn( new RowMetaAndData( meta, emptyValues ) )
    .when( jobDelegate )
    .getJob( new StringObjectId( ABSENT_ID ) );

  assertIsDeletedSet_ForAbsentObject( null, jobDelegate, RepositoryObjectType.JOB );
}
// A transformation row with no data (all fields null) must be reported as deleted.
@Test
public void getObjectInformation_AbsentTrans_IsDeletedFlagSet() throws Exception {
  KettleDatabaseRepositoryTransDelegate transDelegate =
    spy( new KettleDatabaseRepositoryTransDelegate( repository ) );

  RowMeta meta = createMetaForTrans();
  Object[] emptyValues = new Object[ meta.size() ];

  doReturn( new RowMetaAndData( meta, emptyValues ) )
    .when( transDelegate )
    .getTransformation( new StringObjectId( ABSENT_ID ) );

  assertIsDeletedSet_ForAbsentObject( transDelegate, null, RepositoryObjectType.TRANSFORMATION );
}
// Verify that getFields() produces exactly the expected output field names, in order.
@Test
public void getFields() throws Exception {
  MappingInputMeta meta = new MappingInputMeta();
  meta.setInputRowMeta( inputRowMeta );
  meta.setValueRenames( renames );
  meta.allocate( fields.length );
  meta.setFieldName( fields );
  meta.setSelectingAndSortingUnspecifiedFields( sortUnspecified );

  RowMeta rowMeta = new RowMeta();
  meta.getFields( rowMeta, "origin", new RowMetaInterface[ 0 ], null, null, null, null );

  assertEquals( Arrays.toString( expectedOutputFields ), expectedOutputFields.length, rowMeta.size() );
  for ( int index = 0; index < rowMeta.size(); index++ ) {
    assertEquals( String.format( "Element %d", index ),
      expectedOutputFields[ index ], rowMeta.getValueMeta( index ).getName() );
  }
}
}
// Repository object information for a database connection carries its id, name and type.
@Test
public void getObjectInformation_GetDatabaseInformation() throws Exception {
  KettleDatabaseRepositoryDatabaseDelegate databaseDelegate =
    spy( new KettleDatabaseRepositoryDatabaseDelegate( repository ) );
  repository.databaseDelegate = databaseDelegate;

  RowMeta meta = createMetaForDatabase();
  Object[] values = new Object[ meta.size() ];
  int nameIndex =
    Arrays.asList( meta.getFieldNames() ).indexOf( KettleDatabaseRepositoryBase.FIELD_DATABASE_NAME );
  values[ nameIndex ] = EXISTING_ID;

  doReturn( new RowMetaAndData( meta, values ) )
    .when( databaseDelegate )
    .getDatabase( new StringObjectId( EXISTING_ID ) );

  RepositoryObject actual =
    repository.getObjectInformation( new StringObjectId( EXISTING_ID ), RepositoryObjectType.DATABASE );

  assertEquals( new StringObjectId( EXISTING_ID ), actual.getObjectId() );
  assertEquals( EXISTING_ID, actual.getName() );
  assertEquals( RepositoryObjectType.DATABASE, actual.getObjectType() );
}
// getFields() must append exactly one boolean result field with the configured name.
@Test
public void testGetFields() throws KettleStepException {
  DetectLastRowMeta meta = new DetectLastRowMeta();
  meta.setDefault();
  meta.setResultFieldName( "The Result" );

  RowMeta rowMeta = new RowMeta();
  meta.getFields( rowMeta, "this step", null, null, new Variables(), null, null );

  assertEquals( 1, rowMeta.size() );
  ValueMetaInterface resultField = rowMeta.getValueMeta( 0 );
  assertEquals( "The Result", resultField.getName() );
  assertEquals( ValueMetaInterface.TYPE_BOOLEAN, resultField.getType() );
}
// getFields() must append the result, card-type and not-valid-message fields with
// the configured names, correct types, and this step as origin.
@Test
public void testGetFields() throws KettleStepException {
  CreditCardValidatorMeta meta = new CreditCardValidatorMeta();
  meta.setDefault();
  meta.setResultFieldName( "The Result Field" );
  meta.setCardType( "The Card Type Field" );
  meta.setNotValidMsg( "Is Card Valid" );

  RowMeta rowMeta = new RowMeta();
  meta.getFields( rowMeta, "this step", null, null, new Variables(), null, null );

  assertEquals( 3, rowMeta.size() );

  ValueMetaInterface resultField = rowMeta.getValueMeta( 0 );
  assertEquals( "The Result Field", resultField.getName() );
  assertEquals( ValueMetaInterface.TYPE_BOOLEAN, resultField.getType() );
  assertEquals( "this step", resultField.getOrigin() );

  ValueMetaInterface cardTypeField = rowMeta.getValueMeta( 1 );
  assertEquals( "The Card Type Field", cardTypeField.getName() );
  assertEquals( ValueMetaInterface.TYPE_STRING, cardTypeField.getType() );
  assertEquals( "this step", cardTypeField.getOrigin() );

  ValueMetaInterface notValidField = rowMeta.getValueMeta( 2 );
  assertEquals( "Is Card Valid", notValidField.getName() );
  assertEquals( ValueMetaInterface.TYPE_STRING, notValidField.getType() );
  assertEquals( "this step", notValidField.getOrigin() );
}
}