/**
 * Builds the exception with a human-readable message and the path of the file
 * that could not be found.
 *
 * @param message  description of the failure
 * @param filepath path of the missing file, stored via {@code setFilepath}
 */
public KettleFileNotFoundException( String message, String filepath ) {
  super( message );
  setFilepath( filepath );
}
// Rejects handling of the given plugin property by failing fast with the
// class-level MESSAGE text. NOTE(review): the property argument is ignored —
// presumably this handler exists only to signal an unsupported case; confirm
// against the enclosing class before relying on that.
@Override protected void handle( final PluginProperty property ) throws KettleException { throw new KettleException( MESSAGE ); }
/**
 * Returns the base exception message with the plugin-specific details appended.
 * Built via StringBuilder seeded with the super message (so a null super
 * message fails exactly as before), then the plugins message is appended.
 */
@Override
public String getMessage() {
  final StringBuilder fullText = new StringBuilder( super.getMessage() );
  fullText.append( getPluginsMessage() );
  return fullText.toString();
}
/**
 * Loads this job entry's settings (folder name, wildcard options) from the
 * repository.
 *
 * @param rep          repository to read from
 * @param metaStore    metastore (not used by this entry)
 * @param id_jobentry  ID of the job entry whose attributes are read
 * @param databases    available databases (not used by this entry)
 * @param slaveServers available slave servers (not used by this entry)
 * @throws KettleException if reading any attribute fails
 */
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    foldername = rep.getJobEntryAttributeString( id_jobentry, "foldername" );
    specifywildcard = rep.getJobEntryAttributeBoolean( id_jobentry, "specify_wildcard" );
    wildcard = rep.getJobEntryAttributeString( id_jobentry, "wildcard" );
    wildcardexclude = rep.getJobEntryAttributeString( id_jobentry, "wildcardexclude" );
  } catch ( KettleException dbe ) {
    // Fix: pass dbe as the cause instead of flattening it to its message text,
    // so the original stack trace is preserved for diagnostics.
    throw new KettleXMLException( BaseMessages.getString(
      PKG, "JobEntryDeleteResultFilenames.CanNotLoadFromRep", "" + id_jobentry, dbe.getMessage() ), dbe );
  }
}
// Listener callback fired for every row the step writes: asserts that the
// third field (index 2) of the written row equals expectedResult, wrapping any
// failure in a KettleStepException so the transformation surfaces it.
// NOTE(review): the trailing "} );" closes an anonymous listener class whose
// declaration starts before this excerpt.
@Override public void rowWrittenEvent( RowMetaInterface rowMeta, Object[] row ) throws KettleStepException { try { assertEquals( expectedResult, row[ 2 ] ); } catch ( Exception pe ) { throw new KettleStepException( pe ); } } } );
// SWT shell-close handler: asks Spoon whether the file may be closed
// (quitFile may prompt the user to save); setting event.doit = false cancels
// the close. NOTE(review): the KettleException is only dumped via
// printStackTrace() — consider routing it through Spoon's log channel instead;
// confirm whether this swallow is intentional best-effort behavior first.
@Override public void handleEvent( Event event ) { try { event.doit = Spoon.getInstance().quitFile( false ); } catch ( KettleException e ) { e.printStackTrace(); } }
/**
 * Closes the prepared insert statement, if one is open, and clears the
 * reference so a later close is a no-op.
 *
 * @throws KettleDatabaseException if the JDBC close call fails
 */
public void closeInsert() throws KettleDatabaseException {
  if ( prepStatementInsert == null ) {
    return; // nothing to close
  }
  try {
    prepStatementInsert.close();
    prepStatementInsert = null;
  } catch ( SQLException e ) {
    throw new KettleDatabaseException( "Error closing insert prepared statement.", e );
  }
}
/**
 * Conversion to {@link Date} is not supported for the Internet Address value
 * type.
 *
 * @param object ignored
 * @throws KettleValueException always
 */
@Override
public Date getDate( Object object ) throws KettleValueException {
  final String detail = ": it's not possible to convert from Internet Address to a date";
  throw new KettleValueException( toStringMeta() + detail );
}
/**
 * Wraps any failure encountered while deleting a revision in a
 * KettleException, so callers see one uniform exception type with the
 * original error preserved as the cause.
 *
 * @param e the underlying failure
 * @throws KettleException always, carrying {@code e} as its cause
 */
private void processDeleteException( Throwable e ) throws KettleException {
  throw new KettleException( "Unable to complete revision deletion", e );
}
/**
 * Moves the cursor of the given result set to after its last row, translating
 * any JDBC failure into a KettleDatabaseException.
 *
 * @param rs the result set to reposition
 * @throws KettleDatabaseException if the JDBC call fails
 */
public void afterLast( ResultSet rs ) throws KettleDatabaseException {
  try {
    rs.afterLast();
  } catch ( SQLException e ) {
    throw new KettleDatabaseException( "Unable to move resultset to after the last position", e );
  }
}
/**
 * Looks up the named column in the row metadata and returns its value as a
 * Long.
 *
 * @param valueName name of the column to resolve
 * @return the column value converted to Long
 * @throws KettleValueException if no column with that name exists, or if
 *         conversion fails
 */
public Long getInteger( String valueName ) throws KettleValueException {
  final int columnIndex = rowMeta.indexOfValue( valueName );
  if ( columnIndex < 0 ) {
    throw new KettleValueException( "Unknown column '" + valueName + "'" );
  }
  return rowMeta.getInteger( data, columnIndex );
}
/**
 * Closes the S3 object input stream for the file currently being read, if one
 * is open.
 *
 * @throws KettleException if closing the underlying stream fails
 */
public void closeFile() throws KettleException {
  try {
    if ( data.s3ObjectInputStream != null ) {
      data.s3ObjectInputStream.close();
    }
  } catch ( IOException e ) {
    // Fix: the file name in the message was missing its closing quote.
    throw new KettleException( "Unable to close file channel for file '" + data.filenames[data.filenr - 1] + "'", e );
  }
}
}
// Resolves a localized message from the given bundle class, key, and
// parameters, then throws it as a KettleDatabaseException carrying the
// original exception as the cause. NOTE(review): the log argument is unused
// here; the trailing "}" closes the enclosing (anonymous or inner) class whose
// declaration starts before this excerpt.
@Override public void registerException( LogChannelInterface log, Exception e, Class<?> packageClass, String key, String... parameters ) throws KettleDatabaseException { throw new KettleDatabaseException( BaseMessages.getString( packageClass, key, parameters ), e ); } }
private synchronized void closePipe() throws KettleException { try { if ( data != null ) { data.dispose(); // gtf: OutputStream gets closed here } } catch ( Exception e ) { throw new KettleException( e ); // FIX FOR IB TICKET #390822 } finally { triedToClosePipe = true; } }
/**
 * Resolves the repository object ID of the partition schema with the given
 * name.
 *
 * @param name the partition schema name to look up
 * @return the matching ObjectId as returned by {@code getObjectId}
 * @throws KettleException if the lookup fails
 */
@Override
public ObjectId getPartitionSchemaID( String name ) throws KettleException {
  try {
    return getObjectId( name, null, RepositoryObjectType.PARTITION_SCHEMA, false );
  } catch ( Exception lookupError ) {
    throw new KettleException( "Unable to get ID for partition schema [" + name + "]", lookupError );
  }
}
/**
 * Resolves the repository object ID of the slave server with the given name.
 *
 * @param name the slave server name to look up
 * @return the matching ObjectId as returned by {@code getObjectId}
 * @throws KettleException if the lookup fails
 */
@Override
public ObjectId getSlaveID( String name ) throws KettleException {
  try {
    return getObjectId( name, null, RepositoryObjectType.SLAVE_SERVER, false );
  } catch ( Exception lookupError ) {
    throw new KettleException( "Unable to get ID for slave server with name [" + name + "]", lookupError );
  }
}
/**
 * Persists each entry of the stepName array to the repository as an indexed
 * "step_name" attribute of this step.
 *
 * @param rep               repository to save into
 * @param metaStore         metastore (not used by this step)
 * @param id_transformation ID of the owning transformation
 * @param id_step           ID of this step
 * @throws KettleException if saving any attribute fails
 */
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step )
  throws KettleException {
  try {
    for ( int idx = 0; idx < stepName.length; idx++ ) {
      rep.saveStepAttribute( id_transformation, id_step, idx, "step_name", stepName[idx] );
    }
  } catch ( Exception saveError ) {
    throw new KettleException( "Unable to save step information to the repository for id_step=" + id_step, saveError );
  }
}
/**
 * Loads this xsdvalidator job entry's settings (XML and XSD file names) from
 * the repository.
 *
 * @param rep          repository to read from
 * @param metaStore    metastore (not used by this entry)
 * @param id_jobentry  ID of the job entry whose attributes are read
 * @param databases    available databases (not used by this entry)
 * @param slaveServers available slave servers (not used by this entry)
 * @throws KettleException if reading any attribute fails
 */
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    xmlfilename = rep.getJobEntryAttributeString( id_jobentry, "xmlfilename" );
    xsdfilename = rep.getJobEntryAttributeString( id_jobentry, "xsdfilename" );
  } catch ( KettleException cause ) {
    throw new KettleException(
      "Unable to load job entry of type 'xsdvalidator' from the repository for id_jobentry=" + id_jobentry, cause );
  }
}
/**
 * Reads this step's settings (shape file name and DBF file name) from the
 * repository.
 *
 * @param rep       repository to read from
 * @param id_step   ID of the step whose attributes are read
 * @param databases available databases (not used by this step)
 * @param counters  shared counters (not used by this step)
 * @throws KettleException if reading the attributes fails
 */
public void readRep( Repository rep, ObjectId id_step, List<DatabaseMeta> databases, Map<String, Counter> counters )
  throws KettleException {
  try {
    shapeFilename = rep.getStepAttributeString( id_step, "shapefilename" );
    dbfFilename = rep.getStepAttributeString( id_step, "dbffilename" );
  } catch ( Exception readError ) {
    throw new KettleException( "Unexpected error reading step information from the repository", readError );
  }
}
/**
 * Reads this step's settings (whether to output a row count, and the name of
 * the row-count field) from the repository.
 *
 * @param rep       repository to read from
 * @param metaStore metastore (not used by this step)
 * @param id_step   ID of the step whose attributes are read
 * @param databases available databases (not used by this step)
 * @throws KettleException if reading the attributes fails
 */
@Override
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases )
  throws KettleException {
  try {
    outputRowcount = rep.getStepAttributeBoolean( id_step, "outputRowcount" );
    rowcountField = rep.getStepAttributeString( id_step, "rowcountField" );
  } catch ( Exception readError ) {
    throw new KettleException( "Unexpected error reading step information from the repository", readError );
  }
}