/**
 * Closes the streams used to read rows back from the temporary file and clears
 * their fields so they can be reopened later.
 *
 * @throws KettleFileException if closing either stream fails.
 */
private void closeInput() throws KettleFileException {
  try {
    if ( data.fisToTmpFile != null ) {
      data.fisToTmpFile.close();
      data.fisToTmpFile = null;
    }
    if ( data.disToTmpFile != null ) {
      data.disToTmpFile.close();
      data.disToTmpFile = null;
    }
  } catch ( IOException e ) {
    throw new KettleFileException( BaseMessages.getString(
      PKG, "GroupBy.Exception.UnableToCloseInputStream", data.tempFile.getPath() ), e );
  }
}
/**
 * Create the database cache instance by loading it from disk (lazy singleton).
 * <p>
 * NOTE(review): this check-then-assign is not synchronized, so two concurrent
 * first calls could each construct a cache — confirm callers are single-threaded
 * at startup.
 *
 * @return the database cache instance.
 */
public static final DBCache getInstance() {
  if ( dbCache != null ) {
    return dbCache;
  }
  try {
    dbCache = new DBCache();
  } catch ( KettleFileException kfe ) {
    // Preserve the original exception as the cause; previously only the message
    // was kept and the underlying stack trace was lost.
    throw new RuntimeException( "Unable to create the database cache: " + kfe.getMessage(), kfe );
  }
  return dbCache;
}
/** * get the messages back to it's origin cause. */ @Override public String getMessage() { String retval = Const.CR; retval += super.getMessage() + Const.CR; Throwable cause = getCause(); if ( cause != null ) { String message = cause.getMessage(); if ( message != null ) { retval += message + Const.CR; } else { // Add with stack trace elements of cause... StackTraceElement[] ste = cause.getStackTrace(); for ( int i = ste.length - 1; i >= 0; i-- ) { retval += " at " + ste[i].getClassName() + "." + ste[i].getMethodName() + " (" + ste[i].getFileName() + ":" + ste[i].getLineNumber() + ")" + Const.CR; } } } return retval; }
/**
 * Releases the step's temporary-file resources: closes both temp-file stream
 * pairs, deletes the temp file from disk, then delegates to the parent.
 */
@Override
public void dispose( StepMetaInterface smi, StepDataInterface sdi ) {
  if ( data.tempFile != null ) {
    try {
      closeInput();
      closeOutput();
    } catch ( KettleFileException e ) {
      log.logError( e.getLocalizedMessage() );
    }
    // delete() is always attempted; the detailed message is only logged when it fails.
    if ( !data.tempFile.delete() && log.isDetailed() ) {
      log.logDetailed( BaseMessages.getString(
        PKG, "GroupBy.Exception.UnableToDeleteTemporaryFile", data.tempFile.getPath() ) );
    }
  }
  super.dispose( smi, sdi );
}
/**
 * Write the value, including the meta-data, to an output stream.
 *
 * @param outputStream the OutputStream to write to.
 * @throws KettleFileException if something goes wrong while writing.
 */
public void write( OutputStream outputStream ) throws KettleFileException {
  DataOutputStream dataOutputStream = new DataOutputStream( outputStream );
  try {
    writeObj( dataOutputStream );
  } catch ( Exception e ) {
    throw new KettleFileException( "Unable to write value to output stream", e );
  }
}
/** * get the messages back to it's origin cause. */ @Override public String getMessage() { String retval = Const.CR; retval += super.getMessage() + Const.CR; Throwable cause = getCause(); if ( cause != null ) { String message = cause.getMessage(); if ( message != null ) { retval += message + Const.CR; } else { // Add with stack trace elements of cause... StackTraceElement[] ste = cause.getStackTrace(); for ( int i = ste.length - 1; i >= 0; i-- ) { retval += " at " + ste[i].getClassName() + "." + ste[i].getMethodName() + " (" + ste[i].getFileName() + ":" + ste[i].getLineNumber() + ")" + Const.CR; } } } return retval; }
/**
 * Closes the streams used to write rows to the temporary file, clears their
 * fields, and re-arms the first-read flag for a subsequent read pass.
 *
 * @throws KettleFileException if closing either stream fails.
 */
private void closeOutput() throws KettleFileException {
  try {
    if ( data.dosToTempFile != null ) {
      data.dosToTempFile.close();
      data.dosToTempFile = null;
    }
    if ( data.fosToTempFile != null ) {
      data.fosToTempFile.close();
      data.fosToTempFile = null;
    }
    data.firstRead = true;
  } catch ( IOException e ) {
    // NOTE(review): this reuses the "UnableToCloseInputStream" message key for an
    // output-stream failure — check whether a dedicated output-stream key exists.
    throw new KettleFileException( BaseMessages.getString(
      PKG, "GroupBy.Exception.UnableToCloseInputStream", data.tempFile.getPath() ), e );
  }
}
+ fileName + "\" (reason: \"" + Signal.getMessage() + "\")" ); } catch ( ScriptException Signal ) { throw new RuntimeException( "Error while reading file \""
/**
 * Read the data for this cache entry from a data input stream.
 *
 * @param dis the DataInputStream to read this entry from.
 * @throws KettleEOFException when the end of the stream is reached.
 * @throws KettleFileException if the cache entry cannot be read for any other
 *           reason.
 */
public DBCacheEntry( DataInputStream dis ) throws KettleFileException {
  try {
    this.dbname = dis.readUTF();
    this.sql = dis.readUTF();
  } catch ( EOFException eof ) {
    throw new KettleEOFException( "End of file reached", eof );
  } catch ( Exception e ) {
    throw new KettleFileException( "Unable to read cache entry from data input stream", e );
  }
}
} catch ( KettleFileException Signal ) { Context.reportError( "Error while reading file \"" + fileName + "\" (reason: \"" + Signal.getMessage() + "\")" ); } finally { try {
/**
 * Loads the content of a Java sample file under {@code ROOT} into a String.
 * Bytes are widened to chars one at a time (effectively ISO-8859-1 decoding),
 * matching the historical behavior of this loader.
 *
 * @param javaFile the file name, relative to ROOT.
 * @param propertiesFilename the properties file name, used only in the error message.
 * @param entry the key being looked up; when empty, no file is read and "" is returned.
 * @return the file content, or the empty string when {@code entry} is empty.
 * @throws KettleFileException if the file cannot be read.
 */
private String loadJava( String javaFile, String propertiesFilename, String entry ) throws KettleFileException {
  if ( Utils.isEmpty( entry ) ) {
    return "";
  }
  String filename = ROOT + "/" + javaFile;
  StringBuilder content = new StringBuilder( 5000 );
  // try-with-resources replaces the manual finally/close and guarantees the
  // stream is closed on every path.
  try ( FileInputStream stream = new FileInputStream( filename ) ) {
    int c;
    while ( ( c = stream.read() ) != -1 ) {
      content.append( (char) c );
    }
    return content.toString();
  } catch ( Exception e ) {
    throw new KettleFileException(
      propertiesFilename + ": Unable to load file [" + javaFile + "] for key [" + entry + "]", e );
  }
}
logError( "An error occurred executing this job entry : " + je.getMessage(), je ); } catch ( KettleFileException e ) { logError( "An error occurred executing this job entry : " + e.getMessage(), e ); result.setNrErrors( 1 ); } finally {
/**
 * Reads the full contents of a VFS text file into a String using the given
 * character set.
 *
 * @param vfsFilename the VFS file name to read.
 * @param space the variable space used to resolve the file name; may be null.
 * @param charSetName the character set to decode the file with.
 * @return the text content of the file.
 * @throws KettleFileException if the file cannot be opened or read.
 */
public static String getTextFileContent( String vfsFilename, VariableSpace space, String charSetName )
  throws KettleFileException {
  try {
    // try-with-resources: previously the reader and underlying stream leaked
    // whenever a read threw before the explicit close() calls were reached.
    try ( InputStream inputStream =
            space == null ? getInputStream( vfsFilename ) : getInputStream( vfsFilename, space );
          InputStreamReader reader = new InputStreamReader( inputStream, charSetName ) ) {
      StringBuilder aBuffer = new StringBuilder();
      int c;
      while ( ( c = reader.read() ) != -1 ) {
        aBuffer.append( (char) c );
      }
      return aBuffer.toString();
    }
  } catch ( IOException e ) {
    throw new KettleFileException( e );
  }
}
logError( BaseMessages.getString( PKG, "JobMysqlBulkFile.Error.Label" ) + " " + je.getMessage() ); } catch ( KettleFileException e ) { logError( BaseMessages.getString( PKG, "JobMysqlBulkFile.Error.Label" ) + e.getMessage() ); result.setNrErrors( 1 );
data.firstRead = false; } catch ( IOException e ) { throw new KettleFileException( BaseMessages.getString( PKG, "GroupBy.Exception.UnableToReadBackRowFromTemporaryFile" ), e ); row = data.inputRowMeta.readData( data.disToTmpFile ); } catch ( SocketTimeoutException e ) { throw new KettleFileException( e ); // Shouldn't happen on files
logError( "An error occurred executing this job entry : " + je.getMessage() ); } catch ( KettleFileException e ) { logError( "An error occurred executing this job entry : " + e.getMessage() ); result.setNrErrors( 1 );
/**
 * Opens an input stream on the given VFS file name, resolved against the
 * supplied variable space.
 *
 * @param vfsFilename the VFS file name to open.
 * @param space the variable space used to resolve the file name.
 * @return an InputStream on the resolved file.
 * @throws KettleFileException if the file cannot be resolved or opened.
 */
public static InputStream getInputStream( String vfsFilename, VariableSpace space ) throws KettleFileException {
  try {
    return getInputStream( getFileObject( vfsFilename, space ) );
  } catch ( IOException e ) {
    throw new KettleFileException( e );
  }
}
/**
 * Resolves the root file object for the currently selected named cluster and
 * points the VFS file-chooser dialog at it. On resolution failure the previous
 * root is kept and the error is shown to the user and logged.
 */
public void connect() {
  NamedCluster nc = getNamedClusterWidget().getSelectedNamedCluster();
  // The Named Cluster may be hdfs, maprfs or wasb. We need to detect it here since the named
  // cluster was just selected.
  schemeName = "wasb".equals( nc.getStorageScheme() ) ? "wasb" : "hdfs";
  FileObject root = rootFile;
  try {
    root = KettleVFS.getFileObject( nc.processURLsubstitution( FileName.ROOT_PATH, Spoon.getInstance()
      .getMetaStore(), getVariableSpace() ) );
  } catch ( KettleFileException exc ) {
    // Resolution failed: keep the previous root and surface the problem to the user.
    showMessageAndLog( BaseMessages.getString( PKG, "HadoopVfsFileChooserDialog.error" ), BaseMessages.getString(
      PKG, "HadoopVfsFileChooserDialog.Connection.error" ), exc.getMessage() );
  }
  vfsFileChooserDialog.setRootFile( root );
  vfsFileChooserDialog.setSelectedFile( root );
  rootFile = root;
}
/**
 * Opens an output stream on the given VFS file name, resolved against the
 * supplied variable space.
 *
 * @param vfsFilename the VFS file name to open.
 * @param space the variable space used to resolve the file name.
 * @param append true to append to an existing file, false to overwrite it.
 * @return an OutputStream on the resolved file.
 * @throws KettleFileException if the file cannot be resolved or opened.
 */
public static OutputStream getOutputStream( String vfsFilename, VariableSpace space, boolean append )
  throws KettleFileException {
  try {
    return getOutputStream( getFileObject( vfsFilename, space ), append );
  } catch ( IOException e ) {
    throw new KettleFileException( e );
  }
}
/**
 * Resolves the root file object for the currently selected named cluster (via
 * the named-cluster URI scheme) and points the VFS file-chooser dialog at it.
 * On resolution failure the previous root is kept and the error is shown to
 * the user and logged.
 */
public void connect() {
  NamedCluster nc = getNamedClusterWidget().getSelectedNamedCluster();
  HadoopVfsConnection hdfsConnection = new HadoopVfsConnection( nc, getVariableSpace() );
  hdfsConnection.setCustomParameters( Props.getInstance() );
  // The Named Cluster may be hdfs, maprfs or wasb. We need to detect it here since the named
  // cluster was just selected.
  //schemeName = "wasb".equals( nc.getStorageScheme() ) ? "wasb" : "hdfs";
  String connectionString = Schemes.NAMED_CLUSTER_SCHEME + "://" + nc.getName();
  FileSystemOptions fsoptions = new FileSystemOptions();
  FileObject root = rootFile;
  try {
    root = KettleVFS.getFileObject( connectionString, fsoptions );
  } catch ( KettleFileException exc ) {
    // Resolution failed: keep the previous root and surface the problem to the user.
    showMessageAndLog( BaseMessages.getString( PKG, "HadoopVfsFileChooserDialog.error" ), BaseMessages.getString(
      PKG, "HadoopVfsFileChooserDialog.Connection.error" ), exc.getMessage() );
  }
  vfsFileChooserDialog.setRootFile( root );
  vfsFileChooserDialog.setSelectedFile( root );
  rootFile = root;
}