/**
 * Returns the base exception message followed by the plugin-specific details.
 */
@Override
public String getMessage() {
  return super.getMessage() + getPluginsMessage();
}
/**
 * Builds a localized message identifying the exact cell (sheet, row, column)
 * and field that could not be converted, including the underlying cause's message.
 */
@Override
public String getMessage() {
  return BaseMessages.getString( PKG, "KettleCellValueException.CannotConvertFieldFromCell",
    Integer.toString( sheetnr ), Integer.toString( rownr ), Integer.toString( colnr ),
    fieldName, super.getMessage() );
}
/**
 * Compares two queue entries using the key fields of their originating row sets.
 */
@Override
public int compare( QueueEntry a, QueueEntry b ) {
  try {
    int cmp = data.metas[a.index].compare( a.row, data.metas[b.index], b.row,
      data.keyNrs[a.index], data.keyNrs[b.index] );
    // Normalize to -1/0/1 (not strictly required by Comparator; kept for parity).
    return cmp > 0 ? 1 : cmp < 0 ? -1 : 0;
  } catch ( KettleException e ) {
    // Preserve the original exception as the cause instead of only its message,
    // so the full stack trace survives for whoever catches this.
    throw new RuntimeException( e );
  }
} }
// Generates the SQL statements for the transformation in the background,
// reporting progress through the supplied monitor.
public void run( IProgressMonitor monitor ) throws InvocationTargetException, InterruptedException {
  // This is running in a new process: copy some KettleVariables info
  // LocalVariables.getInstance().createKettleVariables(Thread.currentThread(), parentThread, true);
  // --> don't set variables if not running in different thread --> pmd.run(true,true, op);
  try {
    stats = transMeta.getSQLStatements( new ProgressMonitorAdapter( monitor ) );
  } catch ( KettleException e ) {
    // Wrap in InvocationTargetException so the progress-dialog framework can surface it.
    throw new InvocationTargetException( e, BaseMessages.getString( PKG,
      "GetSQLProgressDialog.RuntimeError.UnableToGenerateSQL.Exception", e.getMessage() ) );
  }
} };
/**
 * Lazily loads and returns the shared objects for this meta.
 *
 * @return the sharedObjects (may be null if loading failed)
 */
public SharedObjects getSharedObjects() {
  if ( sharedObjects != null ) {
    return sharedObjects;
  }
  try {
    sharedObjects = new SharedObjects( environmentSubstitute( sharedObjectsFile ) );
  } catch ( KettleException e ) {
    // Best-effort: a failure to load shared objects is only logged at debug level.
    LogChannel.GENERAL.logDebug( e.getMessage(), e );
  }
  return sharedObjects;
}
// Verifies that reading a malformed repositories file raises a KettleException
// with the expected parser message.
@Test
public void testErrorReadingFile() throws Exception {
  when( repoMeta.getKettleUserRepositoriesFile() )
    .thenReturn( getClass().getResource( "bad-repositories.xml" ).getPath() );
  try {
    repoMeta.readData();
    // Without this, the test silently passes when no exception is thrown.
    fail( "Expected KettleException while reading bad-repositories.xml" );
  } catch ( KettleException e ) {
    assertEquals( Const.CR + "Error reading information from file:" + Const.CR
      + "The element type \"repositories\" must be terminated by the matching end-tag "
      + "\"</repositories>\"." + Const.CR, e.getMessage() );
  }
}
// Verifies that reading from a missing resource (null stream) raises a
// KettleException with the expected message.
@Test
public void testErrorReadingInputStream() throws Exception {
  try {
    repoMeta.readDataFromInputStream( getClass().getResourceAsStream( "filedoesnotexist.xml" ) );
    // Without this, the test silently passes when no exception is thrown.
    fail( "Expected KettleException for a null input stream" );
  } catch ( KettleException e ) {
    assertEquals( Const.CR + "Error reading information from file:" + Const.CR
      + "InputStream cannot be null" + Const.CR, e.getMessage() );
  }
}
// Verifies that writing with a null repositories-file path raises a
// KettleException with the expected message prefix.
@Test
public void testErrorWritingFile() throws Exception {
  when( repoMeta.getKettleUserRepositoriesFile() ).thenReturn( null );
  try {
    repoMeta.writeData();
    // Without this, the test silently passes when no exception is thrown.
    fail( "Expected KettleException while writing repositories metadata" );
  } catch ( KettleException e ) {
    assertTrue( e.getMessage().startsWith( Const.CR + "Error writing repositories metadata" ) );
  }
}
/**
 * Loads this job entry's settings from the repository.
 *
 * @throws KettleException if the attributes cannot be read
 */
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    foldername = rep.getJobEntryAttributeString( id_jobentry, "foldername" );
    specifywildcard = rep.getJobEntryAttributeBoolean( id_jobentry, "specify_wildcard" );
    wildcard = rep.getJobEntryAttributeString( id_jobentry, "wildcard" );
    wildcardexclude = rep.getJobEntryAttributeString( id_jobentry, "wildcardexclude" );
  } catch ( KettleException dbe ) {
    // Pass dbe as the cause so the original stack trace is not lost in the rewrap.
    throw new KettleXMLException( BaseMessages.getString( PKG,
      "JobEntryDeleteResultFilenames.CanNotLoadFromRep", "" + id_jobentry, dbe.getMessage() ), dbe );
  }
}
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers ) throws KettleException { try { filename = rep.getJobEntryAttributeString( id_jobentry, "filename" ); // How many arguments? int argnr = rep.countNrJobEntryAttributes( id_jobentry, "name" ); allocate( argnr ); // Read them all... for ( int a = 0; a < argnr; a++ ) { arguments[a] = rep.getJobEntryAttributeString( id_jobentry, a, "name" ); } } catch ( KettleException dbe ) { throw new KettleException( BaseMessages .getString( PKG, "JobEntryFilesExist.ERROR_0002_Cannot_Load_Job_From_Repository", "" + id_jobentry, dbe .getMessage() ) ); } }
// Checks that every injection metadata entry key is one of the expected
// top-level keys and that each expected key appears.
@Test
public void testTopLevelMetadataEntries() {
  try {
    List<StepInjectionMetaEntry> entries =
      stepMeta.getStepMetaInjectionInterface().getStepInjectionMetadataEntries();
    // Strike each observed key off the master list as it is seen.
    String remaining = "FIELDS";
    for ( StepInjectionMetaEntry entry : entries ) {
      String key = entry.getKey();
      assertTrue( remaining.contains( key ) );
      remaining = remaining.replace( key, "" );
    }
    // Every expected key must have been consumed.
    assertTrue( remaining.trim().length() == 0 );
  } catch ( KettleException e ) {
    fail( e.getMessage() );
  }
}
// Periodic heartbeat task: fires the JobHeartbeat extension point while the
// job is running and shuts the heartbeat down once the job has finished.
public void run() {
  if ( Job.this.isFinished() ) {
    log.logBasic( "Shutting down heartbeat signal for " + jobMeta.getName() );
    shutdownHeartbeat( heartbeat );
    return;
  }
  try {
    log.logDebug( "Triggering heartbeat signal for " + jobMeta.getName() + " at every "
      + intervalInSeconds + " seconds" );
    ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.JobHeartbeat.id, Job.this );
  } catch ( KettleException e ) {
    // A failing heartbeat must not kill the scheduler thread; log and continue.
    log.logError( e.getMessage(), e );
  }
} }, intervalInSeconds /* initial delay */, intervalInSeconds /* interval delay */, TimeUnit.SECONDS );
// Periodic heartbeat task: fires the TransformationHeartbeat extension point
// while the transformation is running and shuts the heartbeat down when done.
@Override
public void run() {
  try {
    if ( Trans.this.isFinished() ) {
      log.logBasic( "Shutting down heartbeat signal for " + getName() );
      shutdownHeartbeat( Trans.this.heartbeat );
      return;
    }
    log.logDebug( "Triggering heartbeat signal for " + getName() + " at every "
      + intervalInSeconds + " seconds" );
    ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationHeartbeat.id,
      Trans.this );
  } catch ( KettleException e ) {
    // A failing heartbeat must not kill the scheduler thread; log and continue.
    log.logError( e.getMessage(), e );
  }
} }, intervalInSeconds /* initial delay */, intervalInSeconds /* interval delay */, TimeUnit.SECONDS );
// Checks that every injection metadata entry key is one of the expected
// top-level keys and that each expected key appears.
@Test
public void testTopLevelMetadataEntries() {
  try {
    List<StepInjectionMetaEntry> entries =
      loader.getStepMeta().getStepMetaInterface().getStepMetaInjectionInterface()
        .getStepInjectionMetadataEntries();
    // Strike each observed key off the master list as it is seen.
    String remaining = "SCHEMA TABLE LOADACTION STOPONERROR DELIMITER ENCLOSURE DBNAMEOVERRIDE MAPPINGS ";
    for ( StepInjectionMetaEntry entry : entries ) {
      String key = entry.getKey();
      assertTrue( remaining.contains( key ) );
      remaining = remaining.replace( key, "" );
    }
    // Every expected key must have been consumed.
    assertTrue( remaining.trim().length() == 0 );
  } catch ( KettleException e ) {
    fail( e.getMessage() );
  }
}
/**
 * Requests the daemon to stop; on a clean (error-free) stop, waits for the
 * process to finish before marking it complete.
 */
@Override
public void stopAll() {
  try {
    cancelling = true;
    // Tell the daemon why we are stopping: user request vs. error stop.
    String reason = getErrors() == 0 ? "User Request" : "Error reported";
    getDaemonEndpoint().sendMessage( new StopMessage( reason ) );
    if ( getErrors() == 0 ) {
      waitUntilFinished();
      finishProcess( true );
    }
  } catch ( KettleException e ) {
    // Best-effort stop: failures are only logged at debug level.
    getLogChannel().logDebug( e.getMessage() );
  } finally {
    cancelling = false;
  }
}
/**
 * Loads this job entry's folder-comparison settings from the repository.
 *
 * @throws KettleException if the attributes cannot be read
 */
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    includesubfolders = rep.getJobEntryAttributeBoolean( id_jobentry, "include_subfolders" );
    comparefilecontent = rep.getJobEntryAttributeBoolean( id_jobentry, "compare_filecontent" );
    comparefilesize = rep.getJobEntryAttributeBoolean( id_jobentry, "compare_filesize" );
    compareonly = rep.getJobEntryAttributeString( id_jobentry, "compareonly" );
    wildcard = rep.getJobEntryAttributeString( id_jobentry, "wildcard" );
    filename1 = rep.getJobEntryAttributeString( id_jobentry, "filename1" );
    filename2 = rep.getJobEntryAttributeString( id_jobentry, "filename2" );
  } catch ( KettleException dbe ) {
    // Pass dbe as the cause so the original stack trace is not lost in the rewrap.
    throw new KettleException( BaseMessages.getString( PKG,
      "JobFoldersCompare.Meta.UnableLoadRep", "" + id_jobentry, dbe.getMessage() ), dbe );
  }
}
// Loads a set of good rows and verifies all counters reflect a clean run.
@Test
public void testGuiSuccess() {
  try {
    int r = rows.size();
    BaseStep step = doOutput( rows, "0" );
    ( (IngresVectorwiseLoader) step ).vwLoadMonitorThread.join();
    assertEquals( r, step.getLinesOutput() );
    assertEquals( r, step.getLinesRead() );
    assertEquals( r, step.getLinesWritten() );
    assertEquals( 0, step.getLinesRejected() );
    assertEquals( 0, step.getErrors() );
  } catch ( KettleException e ) {
    fail( e.getMessage() );
  } catch ( InterruptedException e ) {
    // Previously the interrupt was swallowed (printStackTrace), letting the test
    // pass with no assertions run. Restore the interrupt flag and fail instead.
    Thread.currentThread().interrupt();
    fail( e.getMessage() );
  }
}
// Loads rows containing one bad row with an error threshold of 2: the bad row
// is rejected but the step finishes without errors.
@Test
public void testGuiErrorsWithErrorsAllowed() {
  try {
    int r = wrongRows.size();
    BaseStep step = doOutput( wrongRows, "2" );
    ( (IngresVectorwiseLoader) step ).vwLoadMonitorThread.join();
    assertEquals( r - 1, step.getLinesOutput() );
    assertEquals( r, step.getLinesRead() );
    assertEquals( r, step.getLinesWritten() );
    assertEquals( 1, step.getLinesRejected() );
    assertEquals( 0, step.getErrors() );
  } catch ( KettleException e ) {
    fail( e.getMessage() );
  } catch ( InterruptedException e ) {
    // Previously the interrupt was swallowed (printStackTrace), letting the test
    // pass with no assertions run. Restore the interrupt flag and fail instead.
    Thread.currentThread().interrupt();
    fail( e.getMessage() );
  }
}
// Loads rows containing one bad row with an error threshold of 0: the run
// fails, producing no output and one error.
@Test
public void testGuiErrors() {
  try {
    int r = wrongRows.size();
    BaseStep step = doOutput( wrongRows, "0" );
    ( (IngresVectorwiseLoader) step ).vwLoadMonitorThread.join();
    assertEquals( 0, step.getLinesOutput() );
    assertEquals( r, step.getLinesRead() );
    assertEquals( r, step.getLinesWritten() );
    assertEquals( 1, step.getLinesRejected() );
    assertEquals( 1, step.getErrors() );
  } catch ( KettleException e ) {
    fail( e.getMessage() );
  } catch ( InterruptedException e ) {
    // Previously the interrupt was swallowed (printStackTrace), letting the test
    // pass with no assertions run. Restore the interrupt flag and fail instead.
    Thread.currentThread().interrupt();
    fail( e.getMessage() );
  }
}
// Same scenario as testGuiErrorsWithErrorsAllowed but without joining the
// monitor thread: counters must already be correct after doOutput returns.
@Test
public void testWaitForFinish() {
  try {
    // NOTE(review): was rows.size(), but the step is fed wrongRows; the sibling
    // test testGuiErrorsWithErrorsAllowed (identical input and threshold) bases
    // its expectations on wrongRows.size() — aligned here. Confirm fixture sizes.
    int r = wrongRows.size();
    BaseStep step = doOutput( wrongRows, "2" );
    assertEquals( r - 1, step.getLinesOutput() );
    assertEquals( r, step.getLinesRead() );
    assertEquals( r, step.getLinesWritten() );
    assertEquals( 1, step.getLinesRejected() );
    assertEquals( 0, step.getErrors() );
  } catch ( KettleException e ) {
    fail( e.getMessage() );
  }
}