/** Detaches this object from the central Kettle log appender so it stops receiving log events. */
public void dispose() { KettleLogStore.getAppender().removeLoggingEventListener( this ); }
private static final void exitJVM( int status ) { // Let's not forget to close the log file we're writing to... // if ( fileLoggingEventListener != null ) { try { fileLoggingEventListener.close(); } catch ( Exception e ) { e.printStackTrace( System.err ); status = 1; } KettleLogStore.getAppender().removeLoggingEventListener( fileLoggingEventListener ); } System.exit( status ); }
/**
 * JAX-RS endpoint returning the log text of a Carte-hosted transformation as plain text.
 *
 * @param id          Carte identifier of the transformation
 * @param startLineNr first log-buffer line to include (lines before it are skipped)
 * @return the buffered log text from {@code startLineNr} up to the current end of the buffer
 */
@GET
@Path( "/log/{id : .+}/{logStart : .+}" )
@Produces( { MediaType.TEXT_PLAIN } )
public String getTransformationLog( @PathParam( "id" ) String id, @PathParam( "logStart" ) int startLineNr ) {
  // Snapshot the end of the buffer first so the returned window is well-defined.
  int lastLineNr = KettleLogStore.getLastBufferLineNr();
  Trans trans = CarteResource.getTransformation( id );
  String channelId = trans.getLogChannel().getLogChannelId();
  return KettleLogStore.getAppender().getBuffer( channelId, false, startLineNr, lastLineNr ).toString();
}
/**
 * JAX-RS endpoint returning the log text of a Carte-hosted job as plain text.
 *
 * @param id          Carte identifier of the job
 * @param startLineNr first log-buffer line to include (lines before it are skipped)
 * @return the buffered log text from {@code startLineNr} up to the current end of the buffer
 */
@GET
@Path( "/log/{id : .+}/{logStart : .+}" )
@Produces( { MediaType.TEXT_PLAIN } )
public String getJobLog( @PathParam( "id" ) String id, @PathParam( "logStart" ) int startLineNr ) {
  // Snapshot the end of the buffer first so the returned window is well-defined.
  int lastLineNr = KettleLogStore.getLastBufferLineNr();
  Job job = CarteResource.getJob( id );
  String channelId = job.getLogChannel().getLogChannelId();
  return KettleLogStore.getAppender().getBuffer( channelId, false, startLineNr, lastLineNr ).toString();
}
/**
 * Extracts a window of the transformation's log from the central log buffer.
 *
 * @param trans       the transformation whose log channel is read
 * @param startLineNr first buffer line (inclusive) of the window
 * @param lastLineNr  last buffer line of the window
 * @return the log text for the requested window
 * @throws KettleException if the assembled log string is too large to fit in memory
 */
private String getLogText( Trans trans, int startLineNr, int lastLineNr ) throws KettleException {
  try {
    StringBuffer window = KettleLogStore.getAppender()
      .getBuffer( trans.getLogChannel().getLogChannelId(), false, startLineNr, lastLineNr );
    return window.toString();
  } catch ( OutOfMemoryError error ) {
    // Surface the OOM as a checked exception callers already handle, keeping the cause.
    throw new KettleException( "Log string is too long", error );
  }
}
/**
 * Sets up the bridge logger: creates a dedicated, non-additive logger and registers this
 * object as a listener on the central Kettle log appender so events are forwarded to it.
 */
@Override public void init() { pentahoLogger = createLogger( STRING_PENTAHO_DI_LOGGER_NAME ); pentahoLogger.setAdditivity( false ); KettleLogStore.getAppender().addLoggingEventListener( this ); }
/**
 * Extracts a window of the job's log from the central log buffer.
 *
 * @param job         the job whose log channel is read
 * @param startLineNr first buffer line (inclusive) of the window
 * @param lastLineNr  last buffer line of the window
 * @return the log text for the requested window
 * @throws KettleException if the assembled log string is too large to fit in memory
 */
private String getLogText( Job job, int startLineNr, int lastLineNr ) throws KettleException {
  try {
    return KettleLogStore.getAppender()
      .getBuffer( job.getLogChannel().getLogChannelId(), false, startLineNr, lastLineNr ).toString();
  } catch ( OutOfMemoryError error ) {
    // Preserve the OutOfMemoryError as the cause (the Trans variant of this method does;
    // the original here dropped it, hiding the real origin of the failure).
    throw new KettleException( "Log string is too long", error );
  }
}
}
/**
 * Builds the log text to be written to a log table for the given channel, optionally
 * trimmed to the last {@code limit} lines, with the status keyword appended at the end.
 *
 * @param space        variable space used to resolve the KETTLE_LOG_SIZE_LIMIT default
 *                     and to substitute variables in {@code limit}
 * @param logChannelId log channel whose buffered events are collected
 * @param status       log status; its upper-cased code is appended to the returned text
 * @param limit        maximum number of lines to keep (may be null/empty; may contain variables)
 * @return the (possibly trimmed) log text followed by CR + STATUS + CR
 */
@VisibleForTesting
protected String getLogBuffer( VariableSpace space, String logChannelId, LogStatus status, String limit ) {
  LoggingBuffer loggingBuffer = KettleLogStore.getAppender();
  // if job is starting, then remove all previous events from buffer with that job logChannelId.
  // Prevents recursive job calls logging issue.
  if ( status.getStatus().equalsIgnoreCase( String.valueOf( LogStatus.START ) ) ) {
    loggingBuffer.removeChannelFromBuffer( logChannelId );
  }
  StringBuffer buffer = loggingBuffer.getBuffer( logChannelId, true );
  // No explicit limit given: fall back to the system-wide KETTLE_LOG_SIZE_LIMIT variable.
  if ( Utils.isEmpty( limit ) ) {
    String defaultLimit = space.getVariable( Const.KETTLE_LOG_SIZE_LIMIT, null );
    if ( !Utils.isEmpty( defaultLimit ) ) {
      limit = defaultLimit;
    }
  }
  // See if we need to limit the amount of rows
  //
  int nrLines = Utils.isEmpty( limit ) ? -1 : Const.toInt( space.environmentSubstitute( limit ), -1 );
  if ( nrLines > 0 ) {
    // Walk backwards over the last nrLines CR separators, then delete everything before them.
    int start = buffer.length() - 1;
    for ( int i = 0; i < nrLines && start > 0; i++ ) {
      start = buffer.lastIndexOf( Const.CR, start - 1 );
    }
    if ( start > 0 ) {
      buffer.delete( 0, start + Const.CR.length() );
    }
  }
  return buffer.append( Const.CR + status.getStatus().toUpperCase() + Const.CR ).toString();
}
/**
 * Called when the preview transformation finishes: if it failed, capture the log buffer of
 * the first copy of every step so the preview dialog can show per-step error logging.
 */
@Override public void transFinished( Trans trans ) throws KettleException {
  // Copy over the data from the previewDelegate...
  //
  if ( trans.getErrors() != 0 ) {
    // capture logging and store it...
    //
    for ( StepMetaDataCombi combi : trans.getSteps() ) {
      // Only the first copy of each step is captured; all copies share the step name.
      if ( combi.copy == 0 ) {
        StringBuffer logBuffer =
          KettleLogStore.getAppender().getBuffer( combi.step.getLogChannel().getLogChannelId(), false );
        previewLogMap.put( combi.stepMeta, logBuffer );
      }
    }
  }
}
} );
// Mirror every Kettle log event to the console and to SLF4J.
KettleLogStore.getAppender().addLoggingEventListener( new ConsoleLoggingEventListener() );
KettleLogStore.getAppender().addLoggingEventListener( new Slf4jLoggingEventListener() );
// NOTE: "addLogggingEvent" (triple 'g') is the actual method name in Kettle's LoggingBuffer
// API — do not "correct" the spelling here or the call will no longer compile.
KettleLogStore.getAppender().addLogggingEvent( loggingEvent );
/**
 * First execution of a job: getLogBuffer must purge the channel's buffer on START (exactly
 * once) so a fresh run does not inherit events previously logged under the same channel id.
 */
@Test
public void testRemoveChannelFromBufferCallInGetLogBufferInFirstJobExecution() {
  StringBuffer sb = new StringBuffer( "" );
  LoggingBuffer lb = mock( LoggingBuffer.class );
  doReturn( sb ).when( lb ).getBuffer( anyString(), anyBoolean() );
  mockStatic( KettleLogStore.class );
  mockStatic( Utils.class );
  mockStatic( Const.class );
  when( KettleLogStore.getAppender() ).thenReturn( lb );
  BaseLogTable baseLogTable = mock( BaseLogTable.class );
  doCallRealMethod().when( baseLogTable )
    .getLogBuffer( any( VariableSpace.class ), anyString(), any( LogStatus.class ), anyString() );
  VariableSpace vs = mock( VariableSpace.class );
  String s1 = baseLogTable.getLogBuffer( vs, "1", LogStatus.START, null );
  String s2 = baseLogTable.getLogBuffer( vs, "1", LogStatus.END, null );
  assertEquals( Const.CR + "START" + Const.CR, s1 );
  // The mocked buffer is shared and accumulates, so the END snapshot is the START
  // snapshot plus the END marker. (The original accidentally used the three-argument
  // assertEquals overload, passing the first expected value as the failure *message*.)
  assertEquals( s1 + Const.CR + "END" + Const.CR, s2 );
  verify( lb, times( 1 ) ).removeChannelFromBuffer( "1" );
}
@Test public void testRemoveChannelFromBufferCallInGetLogBufferInRecursiveJobExecution() { StringBuffer sb = new StringBuffer( "Event previously executed for the same Job" ); LoggingBuffer lb = mock( LoggingBuffer.class ); doReturn( sb ).when( lb ).getBuffer( anyString(), anyBoolean() ); mockStatic( KettleLogStore.class ); mockStatic( Utils.class ); mockStatic( Const.class ); when( KettleLogStore.getAppender() ).thenReturn( lb ); BaseLogTable baseLogTable = mock( BaseLogTable.class ); doCallRealMethod().when( baseLogTable ).getLogBuffer( any( VariableSpace.class ), anyString(), any( LogStatus.class ), anyString() ); VariableSpace vs = mock( VariableSpace.class ); String s1 = baseLogTable.getLogBuffer( vs, "1", LogStatus.START, null ); String s2 = baseLogTable.getLogBuffer( vs, "1", LogStatus.END, null ); //removeChannelFromBuffer function is void - need to simulate the behaviour here s1 = s1.replace( "Event previously executed for the same Job", "" ); s2 = s2.replace( "Event previously executed for the same Job", "" ); assertEquals( Const.CR + "START" + Const.CR, s1 ); assertEquals( Const.CR + "START" + Const.CR, s1 + Const.CR + "END" + Const.CR, s2 ); verify( lb, times( 1 ) ).removeChannelFromBuffer( "1" ); }
/**
 * Applies persisted user preferences: restores the saved log level (both the default and
 * this window's channel), caps the in-memory log buffer size, and toggles the DB cache.
 */
public void loadSettings() {
  LogLevel logLevel = LogLevel.getLogLevelForCode( props.getLogLevel() );
  DefaultLogLevel.setLogLevel( logLevel );
  log.setLogLevel( logLevel );
  KettleLogStore.getAppender().setMaxNrLines( props.getMaxNrLinesInLog() );
  // transMeta.setMaxUndo(props.getMaxUndo());
  DBCache.getInstance().setActive( props.useDBCache() );
}
/**
 * Handles an error raised by the embedded (single-threaded) sub-transformation.
 * <p>
 * With step error handling enabled, the log lines produced since the previous error
 * (tracked via {@code singleThreaderData.lastLogLine}) are attached to every buffered
 * error row sent down the error stream, and processing continues. Otherwise the whole
 * transformation is stopped.
 *
 * @return true to keep running, false to stop the transformation
 */
private boolean handleError() throws KettleStepException {
  SingleThreaderData singleThreaderData = getData();
  if ( getStepMeta().isDoingErrorHandling() ) {
    // Capture the log window [lastLogLine .. now] for the mapping trans, then advance
    // the marker so the next error only reports lines logged after this point.
    int lastLogLine = KettleLogStore.getLastBufferLineNr();
    StringBuffer logText = KettleLogStore.getAppender().getBuffer(
      singleThreaderData.mappingTrans.getLogChannelId(), false, singleThreaderData.lastLogLine );
    singleThreaderData.lastLogLine = lastLogLine;
    for ( Object[] row : singleThreaderData.errorBuffer ) {
      putError( getInputRowMeta(), row, 1L, logText.toString(), null, "STR-001" );
    }
    singleThreaderData.executor.clearError();
    return true; // continue
  } else {
    setErrors( 1 );
    stopAll();
    logError( BaseMessages.getString( PKG, "SingleThreader.Log.ErrorOccurredInSubTransformation" ) );
    return false; // stop running
  }
}
private static void initLogging( CommandLineOption[] options ) throws KettleException { StringBuilder optionLogFile = getCommandLineOption( options, "logfile" ).getArgument(); StringBuilder optionLogLevel = getCommandLineOption( options, "level" ).getArgument(); // Set default Locale: Locale.setDefault( Const.DEFAULT_LOCALE ); if ( !Utils.isEmpty( optionLogFile ) ) { fileLoggingEventListener = new FileLoggingEventListener( optionLogFile.toString(), true ); if ( log.isBasic() ) { String filename = fileLoggingEventListener.getFilename(); log.logBasic( BaseMessages.getString( PKG, "Spoon.Log.LoggingToFile" ) + filename ); } KettleLogStore.getAppender().addLoggingEventListener( fileLoggingEventListener ); } else { fileLoggingEventListener = null; } if ( !Utils.isEmpty( optionLogLevel ) ) { log.setLogLevel( LogLevel.getLogLevelForCode( optionLogLevel.toString() ) ); if ( log.isBasic() ) { // "Logging is at level : " log.logBasic( BaseMessages.getString( PKG, "Spoon.Log.LoggingAtLevel" ) + log.getLogLevel().getDescription() ); } } }
log.logDebug( "child index = " + childIndex + ", logging object : "
  + loggingObject.toString() + " parent=" + parentLogChannelId );
// NOTE(review): hard-coded channel UUID with the return value discarded — this looks like
// leftover debugging/probe code; confirm whether the call can be removed.
KettleLogStore.getAppender().getBuffer( "2bcc6b3f-c660-4a8b-8b17-89e8cbd5b29b", false );
trans.waitUntilFinished();
// On failure, append the transformation's buffered log so the caller sees the error detail.
if ( trans.getErrors() > 0 ) {
  StringBuffer log = KettleLogStore.getAppender().getBuffer( trans.getLogChannelId(), false );
  buffer.append( log );
// Snapshot the full buffered log for this channel and emit it as the next output field.
String logText = KettleLogStore.getAppender().getBuffer( channelId, false ).toString();
outputRow[ idx++ ] = logText;
LogChannel.GENERAL.logError( "Error closing logging file", e );
// Detach the file listener even when close() failed, so no further events are sent to it.
KettleLogStore.getAppender().removeLoggingEventListener( fileLoggingEventListener );