typeValue = meta.getNormaliserFields()[i].getValue(); if ( !data.type_occ.contains( typeValue ) ) { data.type_occ.add( typeValue ); dataFieldNr = data.inputRowMeta.indexOfValue( meta.getNormaliserFields()[i].getName() ); if ( dataFieldNr < 0 ) { logError( BaseMessages.getString( PKG, "Normaliser.Log.CouldNotFindFieldInRow", meta.getNormaliserFields()[i].getName() ) ); setErrors( 1 ); stopAll();
int maxlen = 0; for ( int i = 0; i < normaliserFields.length; i++ ) { if ( !norm_occ.contains( normaliserFields[i].getNorm() ) ) { norm_occ.add( normaliserFields[i].getNorm() ); field_occ.add( normaliserFields[i].getName() ); if ( normaliserFields[i].getValue().length() > maxlen ) { maxlen = normaliserFields[i].getValue().length(); int idx = row.indexOfValue( normaliserFields[i].getName() ); if ( idx >= 0 ) { row.removeValueMeta( idx );
private void ok() { if ( Utils.isEmpty( wStepname.getText() ) ) { return; } stepname = wStepname.getText(); // return value input.setTypeField( wTypefield.getText() ); int i; // Table table = wFields.table; int nrfields = wFields.nrNonEmpty(); input.allocate( nrfields ); //CHECKSTYLE:Indentation:OFF for ( i = 0; i < nrfields; i++ ) { TableItem item = wFields.getNonEmpty( i ); input.getNormaliserFields()[i].setName( item.getText( NAME_INDEX ) ); input.getNormaliserFields()[i].setValue( item.getText( VALUE_INDEX ) ); input.getNormaliserFields()[i].setNorm( item.getText( NORM_INDEX ) ); } dispose(); }
String lufield = normaliserFields[i].getName();
private NormaliserMeta.NormaliserField[] getTestNormaliserFieldsWiki() { NormaliserMeta.NormaliserField[] rtn = new NormaliserMeta.NormaliserField[6]; rtn[0] = new NormaliserMeta.NormaliserField(); rtn[0].setName( "pr_sl" ); rtn[0].setNorm( "Product Sales" ); rtn[0].setValue( "Product1" ); // Type rtn[1] = new NormaliserMeta.NormaliserField(); rtn[1].setName( "pr1_nr" ); rtn[1].setNorm( "Product Number" ); rtn[1].setValue( "Product1" ); rtn[2] = new NormaliserMeta.NormaliserField(); rtn[2].setName( "pr2_sl" ); rtn[2].setNorm( "Product Sales" ); rtn[2].setValue( "Product2" ); rtn[3] = new NormaliserMeta.NormaliserField(); rtn[3].setName( "pr2_nr" ); rtn[3].setNorm( "Product Number" ); rtn[3].setValue( "Product2" ); rtn[4] = new NormaliserMeta.NormaliserField(); rtn[4].setName( "pr3_sl" ); rtn[4].setNorm( "Product Sales" ); rtn[4].setValue( "Product3" ); rtn[5] = new NormaliserMeta.NormaliserField(); rtn[5].setName( "pr3_nr" ); rtn[5].setNorm( "Product Number" ); rtn[5].setValue( "Product3" ); return rtn; }
/**
 * Populates the dialog widgets from the step metadata ({@code input}):
 * the type-field name and one grid row per normaliser field. Null metadata
 * values leave the corresponding cell untouched.
 */
public void getData() {
  if ( input.getTypeField() != null ) {
    wTypefield.setText( input.getTypeField() );
  }

  for ( int i = 0; i < input.getNormaliserFields().length; i++ ) {
    TableItem row = wFields.table.getItem( i );
    String name = input.getNormaliserFields()[i].getName();
    String value = input.getNormaliserFields()[i].getValue();
    String norm = input.getNormaliserFields()[i].getNorm();
    if ( name != null ) {
      row.setText( NAME_INDEX, name );
    }
    if ( value != null ) {
      row.setText( VALUE_INDEX, value );
    }
    if ( norm != null ) {
      row.setText( NORM_INDEX, norm );
    }
  }

  wFields.setRowNums();
  wFields.optWidth( true );

  wStepname.selectAll();
  wStepname.setFocus();
}
/**
 * Loads this step's settings from the repository: the type-field name plus
 * one (name, value, norm) triple per normaliser field.
 *
 * @throws KettleException if any repository read fails
 */
@Override
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ) throws KettleException {
  try {
    typeField = rep.getStepAttributeString( id_step, "typefield" );

    int fieldCount = rep.countNrStepAttributes( id_step, "field_name" );
    allocate( fieldCount );

    for ( int idx = 0; idx < fieldCount; idx++ ) {
      normaliserFields[idx].setName( rep.getStepAttributeString( id_step, idx, "field_name" ) );
      normaliserFields[idx].setValue( rep.getStepAttributeString( id_step, idx, "field_value" ) );
      normaliserFields[idx].setNorm( rep.getStepAttributeString( id_step, idx, "field_norm" ) );
    }
  } catch ( Exception e ) {
    throw new KettleException( BaseMessages.getString(
      PKG, "NormaliserMeta.Exception.UnexpectedErrorReadingStepInfoFromRepository" ), e );
  }
}
/**
 * Parses this step's settings out of its XML node: the {@code typefield} tag
 * plus one {@code <field>} child (name/value/norm) per normaliser field.
 *
 * @param stepnode the step's XML node
 * @throws KettleXMLException if the XML cannot be read
 */
private void readData( Node stepnode ) throws KettleXMLException {
  try {
    typeField = XMLHandler.getTagValue( stepnode, "typefield" );

    Node fieldsNode = XMLHandler.getSubNode( stepnode, "fields" );
    int fieldCount = XMLHandler.countNodes( fieldsNode, "field" );
    allocate( fieldCount );

    for ( int idx = 0; idx < fieldCount; idx++ ) {
      Node fieldNode = XMLHandler.getSubNodeByNr( fieldsNode, "field", idx );
      normaliserFields[idx].setName( XMLHandler.getTagValue( fieldNode, "name" ) );
      normaliserFields[idx].setValue( XMLHandler.getTagValue( fieldNode, "value" ) );
      normaliserFields[idx].setNorm( XMLHandler.getTagValue( fieldNode, "norm" ) );
    }
  } catch ( Exception e ) {
    throw new KettleXMLException( BaseMessages.getString(
      PKG, "NormaliserMeta.Exception.UnableToLoadStepInfoFromXML" ), e );
  }
}
/**
 * Round-trips NormaliserMeta through XML and repository serialization,
 * validating the normaliserFields array with a field-aware validator.
 */
@Test
public void loadSaveTest() throws KettleException {
  List<String> attributes = Arrays.asList( "normaliserFields" );

  NormaliserField sampleField = new NormaliserField();
  sampleField.setName( "TEST_NAME" );
  sampleField.setValue( "TEST_VALUE" );
  sampleField.setNorm( "TEST" );

  Map<String, FieldLoadSaveValidator<?>> typeValidators = new HashMap<String, FieldLoadSaveValidator<?>>();
  typeValidators.put(
    NormaliserField[].class.getCanonicalName(),
    new ArrayLoadSaveValidator<NormaliserField>( new NormaliserFieldLoadSaveValidator( sampleField ), 50 ) );

  LoadSaveTester<NormaliserMeta> tester =
    new LoadSaveTester<NormaliserMeta>( NormaliserMeta.class, attributes,
      new HashMap<String, String>(), new HashMap<String, String>(),
      new HashMap<String, FieldLoadSaveValidator<?>>(), typeValidators );
  tester.testSerialization();
}
/**
 * Serializes this step's settings to the XML fragment used inside the
 * transformation file: a {@code typefield} tag and a {@code <fields>} list
 * with one {@code <field>} entry (name/value/norm) per normaliser field.
 *
 * @return the XML fragment for this step
 */
@Override
public String getXML() {
  StringBuilder xml = new StringBuilder();

  xml.append( " " ).append( XMLHandler.addTagValue( "typefield", typeField ) );
  xml.append( " <fields>" );
  for ( NormaliserField field : normaliserFields ) {
    xml.append( " <field>" );
    xml.append( " " ).append( XMLHandler.addTagValue( "name", field.getName() ) );
    xml.append( " " ).append( XMLHandler.addTagValue( "value", field.getValue() ) );
    xml.append( " " ).append( XMLHandler.addTagValue( "norm", field.getNorm() ) );
    xml.append( " </field>" );
  }
  xml.append( " </fields>" );

  return xml.toString();
}
public String get() { return meta.getNormaliserFields()[0].getValue(); } } );
public String get() { return meta.getNormaliserFields()[0].getNorm(); } } );
/**
 * Saves this step's settings to the repository: the type-field name plus
 * one (name, value, norm) triple per normaliser field.
 *
 * @throws KettleException if any repository write fails
 */
@Override
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ) throws KettleException {
  try {
    rep.saveStepAttribute( id_transformation, id_step, "typefield", typeField );

    for ( int idx = 0; idx < normaliserFields.length; idx++ ) {
      rep.saveStepAttribute( id_transformation, id_step, idx, "field_name", normaliserFields[idx].getName() );
      rep.saveStepAttribute( id_transformation, id_step, idx, "field_value", normaliserFields[idx].getValue() );
      rep.saveStepAttribute( id_transformation, id_step, idx, "field_norm", normaliserFields[idx].getNorm() );
    }
  } catch ( Exception e ) {
    throw new KettleException( BaseMessages.getString(
      PKG, "NormaliserMeta.Exception.UnableToSaveStepInfoToRepository" ) + id_step, e );
  }
}
public String get() { return meta.getNormaliserFields()[0].getName(); } } );
/**
 * Validates a round-tripped value by delegating to the field's own
 * {@code equals} implementation.
 */
@Override
public boolean validateTestObject( NormaliserField testObject, Object actual ) {
  boolean matches = testObject.equals( actual );
  return matches;
}
/**
 * Field-by-field comparison of two NormaliserField instances (name, norm,
 * value), rejecting anything that is not a NormaliserField.
 */
@Override
public boolean validateTestObject( NormaliserMeta.NormaliserField testObject, Object actual ) {
  if ( !( actual instanceof NormaliserMeta.NormaliserField ) ) {
    return false;
  }
  NormaliserMeta.NormaliserField other = (NormaliserMeta.NormaliserField) actual;

  EqualsBuilder comparison = new EqualsBuilder();
  comparison.append( testObject.getName(), other.getName() );
  comparison.append( testObject.getNorm(), other.getNorm() );
  comparison.append( testObject.getValue(), other.getValue() );
  return comparison.isEquals();
}
}
/**
 * Deep copy: clones the meta itself and then replaces each entry of the
 * normaliserFields array with a clone, so the copy shares no mutable state
 * with the original.
 */
@Override
public Object clone() {
  NormaliserMeta copy = (NormaliserMeta) super.clone();
  copy.allocate( normaliserFields.length );
  for ( int idx = 0; idx < normaliserFields.length; idx++ ) {
    copy.normaliserFields[idx] = (NormaliserField) normaliserFields[idx].clone();
  }
  return copy;
}
/**
 * Replaces the normaliserFields array with a new one of the given size,
 * pre-filled with empty NormaliserField instances.
 *
 * @param nrfields number of field slots to allocate
 */
public void allocate( int nrfields ) {
  normaliserFields = new NormaliserField[nrfields];
  for ( int idx = 0; idx < normaliserFields.length; idx++ ) {
    normaliserFields[idx] = new NormaliserField();
  }
}
/**
 * Collects the lower-cased names of all normaliser fields, skipping nulls.
 *
 * NOTE(review): uses default-locale toLowerCase(), kept as-is for consistency
 * with callers that presumably lowercase the same way — confirm before
 * switching to a fixed Locale.
 *
 * @return a set of lower-cased field names (never null)
 */
public Set<String> getFieldNames() {
  Set<String> names = new HashSet<>();
  for ( NormaliserField field : normaliserFields ) {
    String name = field.getName();
    if ( name != null ) {
      names.add( name.toLowerCase() );
    }
  }
  return names;
}