/**
 * Execute a single SQL statement on the database connection (the connection has to be open).
 * <p>
 * Convenience overload: delegates to {@code execStatement( sql, null, null )}, i.e. the
 * statement is run without a parameter row.
 *
 * @param sql The SQL to execute
 * @return a Result object indicating the number of lines read, deleted, inserted, updated, ...
 * @throws KettleDatabaseException in case anything goes wrong.
 */
public Result execStatement( String sql ) throws KettleDatabaseException {
  return execStatement( sql, null, null );
}
data.result = data.db.execStatement( data.sql ); } else { data.result = data.db.execStatements( data.sql );
// Runs the (presumably long-running) load command on its own thread.
@Override
public void run() {
  try {
    data.db.execStatement( loadCommand );
  } catch ( Exception ex ) {
    // Deliberately not rethrown: a Runnable can't propagate checked exceptions.
    // The failure is captured in this field so the launching thread can inspect it.
    this.ex = ex;
  }
}
/**
 * Execute a single SQL statement against the repository's database connection.
 *
 * @param sql the SQL statement to execute
 * @throws KettleException if the statement fails
 */
public void execStatement( String sql ) throws KettleException {
  connectionDelegate.getDatabase().execStatement( sql );
}
/**
 * Remove all rows from the given table.
 * <p>
 * When this connection is part of a connection group, a DELETE statement is issued instead
 * of the database-specific truncate (presumably because TRUNCATE commits implicitly on many
 * databases — TODO confirm the original rationale).
 *
 * @param schema    the schema the table lives in (may be null)
 * @param tablename the table to empty
 * @throws KettleDatabaseException if the database does not support truncation or the
 *                                 statement fails
 */
public void truncateTable( String schema, String tablename ) throws KettleDatabaseException {
  if ( !Utils.isEmpty( connectionGroup ) ) {
    execStatement( "DELETE FROM " + databaseMeta.getQuotedSchemaTableCombination( schema, tablename ) );
    return;
  }
  String truncateStatement = databaseMeta.getTruncateTableStatement( schema, tablename );
  if ( truncateStatement == null ) {
    throw new KettleDatabaseException( "Truncate table not supported by "
      + databaseMeta.getDatabaseInterface().getPluginName() );
  }
  execStatement( truncateStatement );
}
/**
 * Remove all rows from the given table (no schema).
 * <p>
 * When this connection is part of a connection group a DELETE is issued instead of the
 * database-specific truncate statement.
 *
 * @param tablename the table to empty
 * @throws KettleDatabaseException if truncation is unsupported or the statement fails
 */
public void truncateTable( String tablename ) throws KettleDatabaseException {
  if ( Utils.isEmpty( connectionGroup ) ) {
    String truncateStatement = databaseMeta.getTruncateTableStatement( null, tablename );
    if ( truncateStatement == null ) {
      throw new KettleDatabaseException( "Truncate table not supported by "
        + databaseMeta.getDatabaseInterface().getPluginName() );
    }
    execStatement( truncateStatement );
  } else {
    // NOTE(review): the schema-aware overload quotes via getQuotedSchemaTableCombination();
    // here quoteField() is used on the bare table name — confirm this difference is intentional.
    execStatement( "DELETE FROM " + databaseMeta.quoteField( tablename ) );
  }
}
public Long getNextBatchIdUsingLockTables( DatabaseMeta dbm, Database ldb, String schemaName, String tableName, String fieldName ) throws KettleDatabaseException { // The old way of doing things... Long rtn = null; // Make sure we lock that table to avoid concurrency issues String schemaAndTable = dbm.getQuotedSchemaTableCombination( schemaName, tableName ); ldb.lockTables( new String[] { schemaAndTable, } ); try { // Now insert value -1 to create a real write lock blocking the other // requests.. FCFS String sql = "INSERT INTO " + schemaAndTable + " (" + dbm.quoteField( fieldName ) + ") values (-1)"; ldb.execStatement( sql ); // Now this next lookup will stall on the other connections // rtn = ldb.getNextValue( null, schemaName, tableName, fieldName ); } finally { // Remove the -1 record again... String sql = "DELETE FROM " + schemaAndTable + " WHERE " + dbm.quoteField( fieldName ) + "= -1"; ldb.execStatement( sql ); ldb.unlockTables( new String[] { schemaAndTable, } ); } return rtn; }
/** * Unlock certain tables in the database for write operations * * @param tableNames The tables to unlock * @throws KettleDatabaseException */ public void unlockTables( String[] tableNames ) throws KettleDatabaseException { if ( Utils.isEmpty( tableNames ) ) { return; } // Quote table names too... // String[] quotedTableNames = new String[ tableNames.length ]; for ( int i = 0; i < tableNames.length; i++ ) { quotedTableNames[ i ] = databaseMeta.getQuotedSchemaTableCombination( null, tableNames[ i ] ); } // Get the SQL to unlock the (quoted) tables // String sql = databaseMeta.getSQLUnlockTables( quotedTableNames ); if ( sql != null ) { execStatement( sql ); } }
/**
 * Drop the step's target table.
 *
 * @return true if the table was dropped (exit status 0) — the cached metadata is updated
 *         in that case; false on a non-zero exit status or a database error (logged)
 */
@Override
public boolean dropTable() {
  TableOutputMeta meta = getMeta();
  TableOutputData data = getData();
  String schema = meta.getSchemaName();
  String table = meta.getTableName();
  if ( schema != null && !schema.isEmpty() ) {
    table = schema + "." + table;
  }
  // NOTE(review): the table name comes from step metadata and is concatenated into DDL
  // without quoting — identifiers cannot be bound as parameters, but names containing
  // special characters will break here.
  String sql = "drop table " + table + ";";
  try {
    Result result = data.db.execStatement( sql );
    boolean dropped = result.getExitStatus() == 0;
    if ( dropped ) {
      util.updateMetadata( meta, -1 );
    }
    return dropped;
  } catch ( KettleDatabaseException e ) {
    message = "Could not drop table: " + table;
    logError( message, e );
  }
  return false;
}
execStatement( sql, updateRowMeta, updateRowData );
/**
 * Rename a repository user.
 *
 * @param id_user the id of the user to rename
 * @param newname the new login name
 * @throws KettleException if the update statement fails
 */
public synchronized void renameUser( ObjectId id_user, String newname ) throws KettleException {
  String sql =
    "UPDATE " + quoteTable( KettleDatabaseRepository.TABLE_R_USER )
      + " SET " + quote( KettleDatabaseRepository.FIELD_USER_NAME )
      + " = ? WHERE " + quote( KettleDatabaseRepository.FIELD_USER_ID_USER ) + " = ?";

  // Parameter order must match the two '?' placeholders above: new name, then user id.
  RowMetaAndData params = new RowMetaAndData();
  params.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_USER_NAME ), newname );
  params.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_USER_ID_USER ), id_user );

  repository.connectionDelegate.getDatabase().execStatement( sql, params.getRowMeta(), params.getData() );
}
}
execStatement( sql, row.getRowMeta(), row.getData() ); } catch ( Exception e ) { DatabaseLogExceptionFactory.getExceptionStrategy( logTable )
/**
 * Move a job from one repository directory to another.
 *
 * @param jobname           name of the job to move
 * @param id_directory_from the directory it currently lives in
 * @param id_directory_to   the directory it should move to
 * @throws KettleException if the update statement fails
 */
public synchronized void moveJob( String jobname, ObjectId id_directory_from, ObjectId id_directory_to ) throws KettleException {
  String dirField = quote( KettleDatabaseRepository.FIELD_JOB_ID_DIRECTORY );
  String sql =
    "UPDATE " + quoteTable( KettleDatabaseRepository.TABLE_R_JOB )
      + " SET " + dirField + " = ? WHERE " + quote( KettleDatabaseRepository.FIELD_JOB_NAME )
      + " = ? AND " + dirField + " = ?";

  // Parameter order must match the placeholders: target directory, job name, source directory.
  RowMetaAndData params = new RowMetaAndData();
  params.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ID_DIRECTORY ), id_directory_to );
  params.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_NAME ), jobname );
  params.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ID_DIRECTORY ), id_directory_from );

  repository.connectionDelegate.getDatabase().execStatement( sql, params.getRowMeta(), params.getData() );
}
param.addValue( valField, ValueMetaInterface.TYPE_INTEGER, Long.valueOf( maximum ) ); db.execStatement( sql, param.getRowMeta(), param.getData() );
.toString() ) ); repository.connectionDelegate.getDatabase().execStatement( sql, r.getRowMeta(), r.getData() );
/**
 * Move a transformation from one repository directory to another.
 *
 * @param transname         name of the transformation to move
 * @param id_directory_from the directory it currently lives in
 * @param id_directory_to   the directory it should move to
 * @throws KettleException if the update statement fails
 */
public synchronized void moveTransformation( String transname, ObjectId id_directory_from, ObjectId id_directory_to ) throws KettleException {
  String nameField = quote( KettleDatabaseRepository.FIELD_TRANSFORMATION_NAME );
  String dirField = quote( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY );
  String sql =
    "UPDATE " + quoteTable( KettleDatabaseRepository.TABLE_R_TRANSFORMATION )
      + " SET " + dirField + " = ? WHERE " + nameField + " = ? AND " + dirField + " = ?";

  // Parameter order must match the placeholders: target directory, name, source directory.
  RowMetaAndData params = new RowMetaAndData();
  params.addValue(
    new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY ), id_directory_to );
  params.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_TRANSFORMATION_NAME ), transname );
  params.addValue(
    new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANSFORMATION_ID_DIRECTORY ), id_directory_from );

  repository.connectionDelegate.getDatabase().execStatement( sql, params.getRowMeta(), params.getData() );
}
try { db.connect(); db.execStatement( "CALL VECTORWISE( COMBINE '" + data.schemaTable + " - " + data.schemaTable + "' )" ); db.execStatement( "CALL VECTORWISE( COMBINE '" + data.schemaTable + " - " + data.schemaTable + "' )" ); log.logDetailed( "Table " + data.schemaTable + " was truncated using a 'combine' statement." ); } catch ( Exception e ) {
/**
 * Rename a job and/or move it to another directory in one UPDATE.
 * <p>
 * Does nothing when both the new parent directory and the new name are null.
 *
 * @param id_job       the id of the job to change
 * @param newParentDir the new parent directory, or null to keep the current one
 * @param newname      the new job name, or null to keep the current one
 * @throws KettleException if the update statement fails
 */
public synchronized void renameJob( ObjectId id_job, RepositoryDirectoryInterface newParentDir, String newname ) throws KettleException {
  if ( newParentDir == null && newname == null ) {
    return; // nothing to change
  }

  RowMetaAndData params = new RowMetaAndData();
  StringBuilder sql = new StringBuilder( "UPDATE " )
    .append( quoteTable( KettleDatabaseRepository.TABLE_R_JOB ) ).append( " SET " );
  boolean additionalParameter = false;

  // SET clauses and parameter values must be appended in the same order.
  if ( newname != null ) {
    additionalParameter = true;
    sql.append( quote( KettleDatabaseRepository.FIELD_JOB_NAME ) ).append( " = ? " );
    params.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_JOB_NAME ), newname );
  }
  if ( newParentDir != null ) {
    if ( additionalParameter ) {
      sql.append( ", " );
    }
    sql.append( quote( KettleDatabaseRepository.FIELD_JOB_ID_DIRECTORY ) ).append( " = ? " );
    params.addValue(
      new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ID_DIRECTORY ), newParentDir.getObjectId() );
  }
  sql.append( "WHERE " ).append( quote( KettleDatabaseRepository.FIELD_JOB_ID_JOB ) ).append( " = ?" );
  params.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ID_JOB ), id_job );

  log.logBasic( "sql = [" + sql + "]" );
  log.logBasic( "row = [" + params + "]" );

  repository.connectionDelegate.getDatabase().execStatement(
    sql.toString(), params.getRowMeta(), params.getData() );
}
data.db.execStatement( databaseMeta.stripCR( isql ) ); } catch ( KettleException e ) { throw new KettleDatabaseException( "Error inserting 'unknown' row in dimension ["
log.logBasic( "row = [" + table + "]" ); repository.connectionDelegate.getDatabase().execStatement( sql, table.getRowMeta(), table.getData() ); repository.connectionDelegate.getDatabase().commit();