/**
 * Writes a formatted message to the log at the detailed level, delegating to
 * the wrapped log channel.
 *
 * @param message   the message template to log
 * @param arguments the values substituted into the message template
 */
public void logDetailed( String message, Object... arguments ) {
  log.logDetailed( message, arguments );
}
/**
 * Writes the given message to the log at the detailed level, delegating to
 * the wrapped log channel.
 *
 * @param message the message to log
 */
public void logDetailed( String message ) {
  log.logDetailed( message );
}
/**
 * Logs a message at the detailed level via the underlying log channel.
 *
 * @param message the message to log
 */
public void logDetailed( String message ) {
  log.logDetailed( message );
}
/**
 * Logs a parameterized message at the detailed level via the underlying
 * log channel.
 *
 * @param message   the message template to log
 * @param arguments the values substituted into the message template
 */
public void logDetailed( String message, Object... arguments ) {
  log.logDetailed( message, arguments );
}
/**
 * Logs a parameterized message at the detailed level, delegating to the
 * wrapped log channel.
 *
 * @param message   the message template to log
 * @param arguments the values substituted into the message template
 */
public void logDetailed( String message, Object... arguments ) {
  // Parameter renamed from "s" for consistency with the other logDetailed
  // overloads in this codebase; binary- and source-compatible for callers.
  log.logDetailed( message, arguments );
}
/**
 * Logs the given message at the detailed level, delegating to the wrapped
 * log channel.
 *
 * @param message the message to log
 */
public void logDetailed( String message ) {
  // Parameter renamed from "s" for consistency with the other logDetailed
  // overloads in this codebase; binary- and source-compatible for callers.
  log.logDetailed( message );
}
/**
 * Logs a message at the detailed level using the log channel obtained
 * from {@code getLog()}.
 *
 * @param message the message to log
 */
public void logDetailed( String message ) {
  getLog().logDetailed( message );
}
/**
 * Logs a parameterized message at the detailed level using the log channel
 * obtained from {@code getLog()}.
 *
 * @param message   the message template to log
 * @param arguments the values substituted into the message template
 */
public void logDetailed( String message, Object... arguments ) {
  getLog().logDetailed( message, arguments );
}
/**
 * Reads the JSON input stream, evaluates the combined path expressions, and
 * exposes the result as a row set.
 *
 * @param in the input stream to consume
 * @return an empty response when no records were matched, otherwise a
 *         transposed view over the evaluated columns
 * @throws KettleException if reading or evaluating the input fails
 */
@Override
public RowSet parse( InputStream in ) throws KettleException {
  readInput( in );
  List<List<?>> results = evalCombinedResult();
  // All result columns share the same length; the first one gives the row count.
  int rowCount = results.isEmpty() ? 0 : results.get( 0 ).size();
  if ( log.isDetailed() ) {
    log.logDetailed( BaseMessages.getString( PKG, "JsonInput.Log.NrRecords", rowCount ) );
  }
  return rowCount == 0 ? getEmptyResponse() : new TransposedRowSet( results );
}
/**
 * Verifies that a stream logger configured at the DETAILED level forwards
 * both output lines to {@code logDetailed} on the log channel.
 */
@Test
public void testLogDetailed() {
  streamLogger = new ConfigurableStreamLogger( log, is, LogLevel.DETAILED, PREFIX );
  streamLogger.run();
  // Both lines of the stream must have been logged at the detailed level.
  Mockito.verify( log ).logDetailed( OUT1 );
  Mockito.verify( log ).logDetailed( OUT2 );
}
/**
 * Specify after how many rows a commit needs to occur when inserting or
 * updating values. A commit size of zero or less switches the connection to
 * auto-commit mode; a positive size turns auto-commit off.
 *
 * @param commsize The number of rows to wait before doing a commit on the connection.
 */
public void setCommit( int commsize ) {
  commitsize = commsize;
  boolean autoCommit = commitsize <= 0;
  String onOff = autoCommit ? "on" : "off";
  try {
    connection.setAutoCommit( autoCommit );
    if ( log.isDetailed() ) {
      log.logDetailed( "Auto commit " + onOff );
    }
  } catch ( Exception e ) {
    // Best-effort: a failure to toggle auto-commit is only surfaced at debug level.
    if ( log.isDebug() ) {
      log.logDebug( "Can't turn auto commit " + onOff + Const.CR + Const.getStackTracker( e ) );
    }
  }
}
/**
 * Only for unique connections usage, typically you use disconnect() to
 * disconnect() from the database.
 *
 * @throws KettleDatabaseException in case there is an error during connection close.
 */
public synchronized void closeConnectionOnly() throws KettleDatabaseException {
  try {
    if ( connection != null ) {
      connection.close();
      // Pooled connections keep their reference so the pool can reclaim them.
      if ( !databaseMeta.isUsingConnectionPool() ) {
        connection = null;
      }
    }
    if ( log.isDetailed() ) {
      log.logDetailed( "Connection to database closed!" );
    }
  } catch ( SQLException e ) {
    throw new KettleDatabaseException( "Error disconnecting from database '" + toString() + "'", e );
  }
}
public String sendXML( String xml, String service ) throws Exception { HttpPost method = buildSendXMLMethod( xml.getBytes( Const.XML_ENCODING ), service ); try { return executeAuth( method ); } finally { // Release current connection to the connection pool once you are done method.releaseConnection(); if ( log.isDetailed() ) { log.logDetailed( BaseMessages.getString( PKG, "SlaveServer.DETAILED_SentXmlToService", service, environmentSubstitute( hostname ) ) ); } } }
/**
 * Releases step resources: closes the temporary spill file's input/output
 * streams and deletes the file before delegating to the superclass.
 *
 * @param smi step meta interface
 * @param sdi step data interface
 */
@Override
public void dispose( StepMetaInterface smi, StepDataInterface sdi ) {
  if ( data.tempFile != null ) {
    try {
      closeInput();
      closeOutput();
    } catch ( KettleFileException e ) {
      log.logError( e.getLocalizedMessage() );
    }
    // A failed delete is not fatal; it is only reported at the detailed level.
    if ( !data.tempFile.delete() && log.isDetailed() ) {
      log.logDetailed( BaseMessages.getString( PKG, "GroupBy.Exception.UnableToDeleteTemporaryFile",
        data.tempFile.getPath() ) );
    }
  }
  super.dispose( smi, sdi );
}
/**
 * Establishes the LDAP association for the given credentials and logs the
 * connection details.
 *
 * @param username the user name to bind with (may be null/empty)
 * @param password the password to bind with
 * @throws KettleException if the connection attempt fails
 */
public final void connect( String username, String password ) throws KettleException {
  Hashtable<String, String> env = new Hashtable<String, String>();
  setupEnvironment( env, username, password );
  try {
    /* Establish LDAP association */
    doConnect( username, password );
    if ( log.isBasic() ) {
      log.logBasic( BaseMessages.getString( PKG, "LDAPInput.Log.ConnectedToServer", hostname,
        Const.NVL( username, "" ) ) );
    }
    if ( log.isDetailed() ) {
      log.logDetailed( BaseMessages.getString( PKG, "LDAPInput.ClassUsed.Message", ctx.getClass().getName() ) );
    }
  } catch ( Exception e ) {
    // NOTE(review): message key uses "LDAPinput" (lowercase i) while the keys
    // above use "LDAPInput" — verify against the messages bundle before changing.
    throw new KettleException( BaseMessages.getString( PKG, "LDAPinput.Exception.ErrorConnecting",
      e.getMessage() ), e );
  }
}
/**
 * Prepare inserting values into a table, using the fields &amp; values in a Row
 *
 * @param rowMeta    The metadata row to determine which values need to be inserted
 * @param schemaName The name of the schema in which we want to insert rows
 * @param tableName  The name of the table in which we want to insert rows
 * @throws KettleDatabaseException if something went wrong.
 */
public void prepareInsert( RowMetaInterface rowMeta, String schemaName, String tableName )
  throws KettleDatabaseException {
  if ( rowMeta.size() == 0 ) {
    throw new KettleDatabaseException( "No fields in row, can't insert!" );
  }
  String sql = getInsertStatement( schemaName, tableName, rowMeta );
  if ( log.isDetailed() ) {
    log.logDetailed( "Preparing statement: " + Const.CR + sql );
  }
  prepStatementInsert = prepareSQL( sql );
}
/**
 * Sets the parent logging object and derives a fresh log channel, log level,
 * and container object id from it.
 *
 * @param parent the new parent
 */
public void setParent( LoggingObjectInterface parent ) {
  this.parent = parent;
  this.log = new LogChannel( this, parent );
  this.logLevel = log.getLogLevel();
  // Inherit the container id from the new channel only if none was set yet.
  if ( this.containerObjectId == null ) {
    this.containerObjectId = log.getContainerObjectId();
  }
  if ( log.isDetailed() ) {
    log.logDetailed( BaseMessages.getString( PKG, "Trans.Log.TransformationIsPreloaded" ) );
  }
  if ( log.isDebug() ) {
    log.logDebug( BaseMessages.getString( PKG, "Trans.Log.NumberOfStepsToRun",
      String.valueOf( transMeta.nrSteps() ), String.valueOf( transMeta.nrTransHops() ) ) );
  }
}
private void addFilenameToResult() throws FileSystemException { if ( meta.isaddTargetFileNametoResult() ) { // Add this to the result file names... ResultFile resultFile = new ResultFile( ResultFile.FILE_TYPE_GENERAL, data.zipFile, getTransMeta().getName(), getStepname() ); resultFile.setComment( BaseMessages.getString( PKG, "ZipFile.Log.FileAddedResult" ) ); addResultFile( resultFile ); if ( log.isDetailed() ) { log.logDetailed( toString(), BaseMessages.getString( PKG, "ZipFile.Log.FilenameAddResult", data.sourceFile .toString() ) ); } } }
/**
 * Inserts the given directory into the repository, assigns it the newly
 * generated object id, and commits the change.
 *
 * @param dir the repository directory to save
 * @throws KettleException if the directory could not be saved
 */
public void saveRepositoryDirectory( RepositoryDirectoryInterface dir ) throws KettleException {
  try {
    ObjectId id_directory_parent = null;
    if ( dir.getParent() != null ) {
      id_directory_parent = dir.getParent().getObjectId();
    }
    dir.setObjectId( insertDirectory( id_directory_parent, dir ) );
    // Guard the detailed log like the rest of the codebase: avoids the string
    // concatenation when detailed logging is disabled.
    if ( log.isDetailed() ) {
      log.logDetailed( "New id of directory = " + dir.getObjectId() );
    }
    repository.commit();
  } catch ( Exception e ) {
    throw new KettleException( "Unable to save directory [" + dir + "] in the repository", e );
  }
}
@Override public void saveRepositoryDirectory( RepositoryDirectoryInterface dir ) throws KettleException { try { String filename = calcDirectoryName( dir ); ObjectId objectId = new StringObjectId( calcRelativeElementDirectory( dir ) ); FileObject fileObject = KettleVFS.getFileObject( filename ); fileObject.createFolder(); // also create parents dir.setObjectId( objectId ); log.logDetailed( "New id of directory = " + dir.getObjectId() ); } catch ( Exception e ) { throw new KettleException( "Unable to save directory [" + dir + "] in the repository", e ); } }