private void ok() { if ( Utils.isEmpty( wStepname.getText() ) ) { return; } stepname = wStepname.getText(); // return value input.setTypeField( wTypefield.getText() ); int i; // Table table = wFields.table; int nrfields = wFields.nrNonEmpty(); input.allocate( nrfields ); //CHECKSTYLE:Indentation:OFF for ( i = 0; i < nrfields; i++ ) { TableItem item = wFields.getNonEmpty( i ); input.getNormaliserFields()[i].setName( item.getText( NAME_INDEX ) ); input.getNormaliserFields()[i].setValue( item.getText( VALUE_INDEX ) ); input.getNormaliserFields()[i].setNorm( item.getText( NORM_INDEX ) ); } dispose(); }
/**
 * Populates the dialog widgets from the step metadata ({@code input}):
 * the type-field text box and one table row per normaliser field, then
 * renumbers/resizes the table and gives the step-name box focus.
 */
public void getData() {
  String typeField = input.getTypeField();
  if ( typeField != null ) {
    wTypefield.setText( typeField );
  }

  for ( int row = 0; row < input.getNormaliserFields().length; row++ ) {
    TableItem item = wFields.table.getItem( row );

    String name = input.getNormaliserFields()[row].getName();
    if ( name != null ) {
      item.setText( NAME_INDEX, name );
    }
    String value = input.getNormaliserFields()[row].getValue();
    if ( value != null ) {
      item.setText( VALUE_INDEX, value );
    }
    String norm = input.getNormaliserFields()[row].getNorm();
    if ( norm != null ) {
      item.setText( NORM_INDEX, norm );
    }
  }

  wFields.setRowNums();
  wFields.optWidth( true );

  wStepname.selectAll();
  wStepname.setFocus();
}
/**
 * Loads this step's settings from the given XML step node by delegating to
 * {@link #readData(Node)}. The database and metastore arguments are not
 * needed by this step and are ignored.
 *
 * @throws KettleXMLException when the XML cannot be parsed into step settings
 */
@Override
public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException {
  readData( stepnode );
}
/**
 * Modification hook: pre-allocates five normaliser field slots when the
 * supplied step metadata is a {@link NormaliserMeta}; any other metadata
 * type is left untouched.
 */
@Override
public void modify( StepMetaInterface someMeta ) {
  if ( !( someMeta instanceof NormaliserMeta ) ) {
    return;
  }
  NormaliserMeta normaliserMeta = (NormaliserMeta) someMeta;
  normaliserMeta.allocate( 5 );
}
// NOTE(review): this is a mid-method fragment — the enclosing method, the loop
// that declares 'i', and the declaration of 'dataFieldNr' are outside this
// view, and the braces do not balance here. Code kept byte-identical; only
// comments added. Presumably part of the Normaliser step's initialisation of
// its type-to-field lookup — TODO confirm against the full method.
meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore );
int normFieldsLength = meta.getNormaliserFields().length;
data.typeToFieldIndex = new HashMap<>();
String typeValue;
// The normaliser field's "value" is its type label; collect each distinct one.
typeValue = meta.getNormaliserFields()[i].getValue();
if ( !data.type_occ.contains( typeValue ) ) {
  data.type_occ.add( typeValue );
  // Resolve the input-row index of the source field; a negative index means
  // the field is missing, so the transformation is flagged failed and stopped.
  dataFieldNr = data.inputRowMeta.indexOfValue( meta.getNormaliserFields()[i].getName() );
  if ( dataFieldNr < 0 ) {
    logError( BaseMessages.getString( PKG, "Normaliser.Log.CouldNotFindFieldInRow", meta.getNormaliserFields()[i].getName() ) );
    setErrors( 1 );
    stopAll();
Set<String> normaliserFields = meta.getFieldNames();
int irmSize = data.inputRowMeta.size();
@Test public void testNormaliserProcessRowsWikiData() throws Exception { // We should have 1 row as input to the normaliser and 3 rows as output to the normaliser with the data // shown on the Wiki page: http://wiki.pentaho.com/display/EAI/Row+Normaliser // // // Data input looks like this: // // DATE PR1_NR PR_SL PR2_NR PR2_SL PR3_NR PR3_SL // 2003010 5 100 10 250 4 150 // // Data output looks like this: // // DATE Type Product Sales Product Number // 2003010 Product1 100 5 // 2003010 Product2 250 10 // 2003010 Product3 150 4 // final String stepName = "Row Normaliser"; NormaliserMeta stepMeta = new NormaliserMeta(); stepMeta.setDefault(); stepMeta.setNormaliserFields( getTestNormaliserFieldsWiki() ); stepMeta.setTypeField( "Type" ); TransMeta transMeta = TransTestFactory.generateTestTransformation( null, stepMeta, stepName ); List<RowMetaAndData> inputList = getWikiInputRowMetaAndData(); List<RowMetaAndData> outputList = TransTestFactory.executeTestTransformation( transMeta, TransTestFactory.INJECTOR_STEPNAME, stepName, TransTestFactory.DUMMY_STEPNAME, inputList ); List<RowMetaAndData> expectedOutput = this.getExpectedWikiOutputRowMetaAndData(); checkResults( expectedOutput, outputList ); }
// NOTE(review): fragment — the trailing "} } );" closes an anonymous class and
// a call whose opening is outside this view (presumably a metadata-injection
// getter binding — TODO confirm). Code kept byte-identical.
// Returns the name of the first normaliser field.
public String get() { return meta.getNormaliserFields()[0].getName(); } } );
// NOTE(review): two-statement fragment from the dialog's open/layout code;
// the enclosing method is outside this view. Code kept byte-identical.
// Remember the meta's changed flag so Cancel can restore it; size the fields
// table to one row per existing normaliser field.
changed = input.hasChanged();
final int fieldsRows = input.getNormaliserFields().length;
/**
 * Test fixture: initialises the shared harness with a fresh
 * {@link NormaliserMeta} before each test.
 */
@Before
public void setup() {
  NormaliserMeta meta = new NormaliserMeta();
  setup( meta );
}
/**
 * Deep copy: clones each {@code NormaliserField} element so the returned
 * metadata shares no mutable field state with this instance.
 */
@Override
public Object clone() {
  NormaliserMeta copy = (NormaliserMeta) super.clone();

  int fieldCount = normaliserFields.length;
  copy.allocate( fieldCount );
  for ( int idx = 0; idx < fieldCount; idx++ ) {
    copy.normaliserFields[idx] = (NormaliserField) normaliserFields[idx].clone();
  }
  return copy;
}
// NOTE(review): fragment — the trailing "} } );" closes an anonymous class and
// a call whose opening is outside this view (presumably a metadata-injection
// getter binding — TODO confirm). Code kept byte-identical.
// Returns the value (type label) of the first normaliser field.
public String get() { return meta.getNormaliserFields()[0].getValue(); } } );
/**
 * Resets this metadata to its defaults: type field name {@code "typefield"}
 * and an empty normaliser-field array.
 *
 * <p>The original implementation looped over the fields to seed per-field
 * defaults, but with {@code nrfields = 0} that loop never executed; the dead
 * loop has been removed with no behavior change.</p>
 */
@Override
public void setDefault() {
  typeField = "typefield";
  allocate( 0 );
}
// NOTE(review): fragment — the trailing "} } );" closes an anonymous class and
// a call whose opening is outside this view (presumably a metadata-injection
// getter binding — TODO confirm). Code kept byte-identical.
// Returns the normalised (output) name of the first normaliser field.
public String get() { return meta.getNormaliserFields()[0].getNorm(); } } );
/**
 * Loads this step's settings from the repository: the type-field name plus
 * one (name, value, norm) triple per stored field attribute.
 *
 * @throws KettleException wrapping any failure while reading step attributes
 */
@Override
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException {
  try {
    typeField = rep.getStepAttributeString( id_step, "typefield" );

    int fieldCount = rep.countNrStepAttributes( id_step, "field_name" );
    allocate( fieldCount );

    for ( int idx = 0; idx < fieldCount; idx++ ) {
      NormaliserField field = normaliserFields[idx];
      field.setName( rep.getStepAttributeString( id_step, idx, "field_name" ) );
      field.setValue( rep.getStepAttributeString( id_step, idx, "field_value" ) );
      field.setNorm( rep.getStepAttributeString( id_step, idx, "field_norm" ) );
    }
  } catch ( Exception e ) {
    throw new KettleException( BaseMessages.getString(
      PKG, "NormaliserMeta.Exception.UnexpectedErrorReadingStepInfoFromRepository" ), e );
  }
}
/**
 * Parses this step's settings out of the given XML step node: the
 * {@code typefield} tag plus one (name, value, norm) triple per
 * {@code field} child of the {@code fields} node.
 *
 * @throws KettleXMLException wrapping any failure while reading the XML
 */
private void readData( Node stepnode ) throws KettleXMLException {
  try {
    typeField = XMLHandler.getTagValue( stepnode, "typefield" );

    Node fieldsNode = XMLHandler.getSubNode( stepnode, "fields" );
    int fieldCount = XMLHandler.countNodes( fieldsNode, "field" );
    allocate( fieldCount );

    for ( int idx = 0; idx < fieldCount; idx++ ) {
      Node fieldNode = XMLHandler.getSubNodeByNr( fieldsNode, "field", idx );
      NormaliserField field = normaliserFields[idx];
      field.setName( XMLHandler.getTagValue( fieldNode, "name" ) );
      field.setValue( XMLHandler.getTagValue( fieldNode, "value" ) );
      field.setNorm( XMLHandler.getTagValue( fieldNode, "norm" ) );
    }
  } catch ( Exception e ) {
    throw new KettleXMLException( BaseMessages.getString(
      PKG, "NormaliserMeta.Exception.UnableToLoadStepInfoFromXML" ), e );
  }
}