protected Database getDatabase() { // Added for test purposes return new Database( loggingObject, databaseMeta ); }
/**
 * Factory for a {@code Database} bound to the supplied metadata, with this
 * object acting as the logging parent.
 */
protected Database createDataBase( DatabaseMeta databaseMeta ) {
  return new Database( this, databaseMeta );
}
/**
 * Builds a {@code Database} for the given metadata; this object is the
 * logging parent.
 */
Database getDatabase( DatabaseMeta meta ) {
  return new Database( this, meta );
}
/**
 * Creates a fresh {@code Database} from this step's configured
 * {@code databaseMeta}, logging under {@code loggingObject}.
 */
Database createDatabaseObject() {
  return new Database( loggingObject, databaseMeta );
}
/**
 * Instantiates a {@code Database} for the supplied connection metadata.
 */
protected Database createDataBase( DatabaseMeta meta ) {
  return new Database( this, meta );
}
/**
 * Returns a brand-new {@code Database} built from the configured
 * {@code connection} metadata.
 */
Database getNewDatabaseFromMeta() {
  return new Database( this, connection );
}
/**
 * Constructs a {@code Database} from this step's {@code databaseMeta};
 * {@code loggingObject} is used as the logging parent.
 */
Database createDatabaseObject() {
  return new Database( loggingObject, databaseMeta );
}
/**
 * Creates the connection delegate for a database-backed repository.
 *
 * @param repository   the owning repository.
 * @param databaseMeta metadata of the repository's database connection.
 */
public KettleDatabaseRepositoryConnectionDelegate( KettleDatabaseRepository repository, DatabaseMeta databaseMeta ) {
  super( repository );
  this.databaseMeta = databaseMeta;
  this.database = new Database( loggingObject, databaseMeta );
  sqlMap = new ConcurrentHashMap<>();
  useBatchProcessing = true; // defaults to true
  // On the collapsed one-line form, the comment above commented out every
  // statement below it; they must each run to reset the prepared statements
  // and pin the expected repository version.
  psStepAttributesLookup = null;
  psStepAttributesInsert = null;
  psTransAttributesLookup = null;
  pstmt_entry_attributes = null;
  this.majorVersion = REQUIRED_MAJOR_VERSION;
  this.minorVersion = REQUIRED_MINOR_VERSION;
}
/**
 * Determines the fields returned by the SQL query, reporting progress and
 * honoring cancellation via the monitor.
 *
 * @param monitor progress monitor checked for user cancellation.
 * @throws InvocationTargetException on cancellation or any query failure.
 */
public void run( IProgressMonitor monitor ) throws InvocationTargetException, InterruptedException {
  db = new Database( Spoon.loggingObject, dbMeta );
  try {
    db.connect();
    result = db.getQueryFields( sql, false );
    if ( monitor.isCanceled() ) {
      throw new InvocationTargetException( new Exception( "This operation was cancelled!" ) );
    }
  } catch ( InvocationTargetException e ) {
    // Bug fix: the cancellation exception above was previously caught by the
    // generic handler below and re-wrapped with the wrong message; rethrow it
    // untouched instead.
    throw e;
  } catch ( Exception e ) {
    throw new InvocationTargetException( e, "Problem encountered determining query fields: " + e.toString() );
  } finally {
    db.disconnect();
  }
} };
/**
 * Connects to the database, reads the field layout of {@code schemaTable},
 * and disconnects.
 *
 * @param parentLoggingObject logging parent for the short-lived connection.
 * @return row metadata of the target table.
 * @throws KettleDatabaseException if connecting or reading the fields fails.
 */
private RowMetaInterface getTableFields( LoggingObjectInterface parentLoggingObject ) throws KettleDatabaseException {
  Database lookupDb = new Database( parentLoggingObject, databaseMeta );
  try {
    lookupDb.connect();
    return lookupDb.getTableFields( schemaTable );
  } finally {
    // Always release the connection, even when the lookup throws.
    lookupDb.disconnect();
  }
}
/**
 * Counts the rows of {@code tableName} via the dialect's SELECT COUNT
 * statement, honoring user cancellation.
 */
public void run( IProgressMonitor monitor ) throws InvocationTargetException, InterruptedException {
  db = new Database( Spoon.loggingObject, dbMeta );
  try {
    db.connect();
    String countSql = dbMeta.getDatabaseInterface().getSelectCountStatement( tableName );
    RowMetaAndData countRow = db.getOneRow( countSql );
    size = countRow.getRowMeta().getInteger( countRow.getData(), 0 );
    if ( monitor.isCanceled() ) {
      throw new InvocationTargetException( new Exception( "This operation was cancelled!" ) );
    }
  } catch ( KettleException e ) {
    throw new InvocationTargetException( e, "Couldn't get a result because of an error :" + e.toString() );
  } finally {
    db.disconnect();
  }
} };
protected void checkConnection() throws KettleDatabaseException { // check connection // connect and disconnect Database dbchecked = null; try { dbchecked = new Database( this, connection ); dbchecked.shareVariablesWith( this ); dbchecked.connect( parentJob.getTransactionId(), null ); } finally { if ( dbchecked != null ) { dbchecked.disconnect(); } } }
@VisibleForTesting Database getDatabase( LoggingObjectInterface parentObject, PGBulkLoaderMeta pgBulkLoaderMeta ) { DatabaseMeta dbMeta = pgBulkLoaderMeta.getDatabaseMeta(); // If dbNameOverride is present, clone the origin db meta and override the DB name String dbNameOverride = environmentSubstitute( pgBulkLoaderMeta.getDbNameOverride() ); if ( !Utils.isEmpty( dbNameOverride ) ) { dbMeta = (DatabaseMeta) pgBulkLoaderMeta.getDatabaseMeta().clone(); dbMeta.setDBName( dbNameOverride.trim() ); logDebug( "DB name overridden to the value: " + dbNameOverride ); } return new Database( parentObject, dbMeta ); }
/**
 * Opens a connection to the configured database.
 *
 * @return the connected database.
 * @throws KettleException
 *           if no connection is defined or connecting fails.
 */
public Database connectToDatabase() throws KettleException {
  if ( this.getDbMeta() == null ) {
    throw new KettleException( MESSAGES.getString( "TeraFastMeta.Exception.ConnectionNotDefined" ) );
  }
  Database db = new Database( loggingObject, this.getDbMeta() );
  db.connect();
  return db;
}
/**
 * Creates a spied {@code Database} whose JDBC layer is fully mocked, so tests
 * run without a real connection. Table-field lookups return one
 * binary-storage string field plus an integer id field.
 */
private Database createVirtualDb( DatabaseMeta meta ) throws Exception {
  // Stub the JDBC plumbing: statement -> result set -> empty metadata.
  ResultSet resultSet = mock( ResultSet.class );
  when( resultSet.getMetaData() ).thenReturn( mock( ResultSetMetaData.class ) );
  PreparedStatement statement = mock( PreparedStatement.class );
  when( statement.executeQuery() ).thenReturn( resultSet );
  Connection jdbcConnection = mock( Connection.class );
  when( jdbcConnection.prepareStatement( anyString() ) ).thenReturn( statement );

  Database db = new Database( mock( LoggingObjectInterface.class ), meta );
  db.setConnection( jdbcConnection );
  db = spy( db );
  doNothing().when( db ).normalConnect( anyString() );

  // Fixed field layout reported for any table name.
  ValueMetaInterface binaryField = new ValueMetaString( BINARY_FIELD );
  binaryField.setStorageType( ValueMetaInterface.STORAGE_TYPE_BINARY_STRING );
  ValueMetaInterface idField = new ValueMetaInteger( ID_FIELD );
  RowMetaInterface tableMeta = new RowMeta();
  tableMeta.addValueMeta( binaryField );
  tableMeta.addValueMeta( idField );
  doReturn( tableMeta ).when( db ).getTableFields( anyString() );
  doReturn( tableMeta ).when( db ).getTableFieldsMeta( anyString(), anyString() );
  return db;
}
/**
 * Fetches up to {@code limit} rows from {@code tableName} for preview,
 * capturing both the rows and their metadata.
 */
public void run( IProgressMonitor monitor ) throws InvocationTargetException, InterruptedException {
  db = new Database( Spoon.loggingObject, dbMeta );
  try {
    db.connect();
    // A positive limit caps the query server-side as well.
    if ( limit > 0 ) {
      db.setQueryLimit( limit );
    }
    rows = db.getFirstRows( tableName, limit, new ProgressMonitorAdapter( monitor ) );
    rowMeta = db.getReturnRowMeta();
  } catch ( KettleException e ) {
    throw new InvocationTargetException( e, "Couldn't find any rows because of an error :" + e.toString() );
  } finally {
    db.disconnect();
  }
} };
public boolean getInputData() { // Get some data... RipDatabaseWizardPage1 page1 = (RipDatabaseWizardPage1) getPreviousPage(); Database sourceDb = new Database( RipDatabaseWizard.loggingObject, page1.getSourceDatabase() ); try { sourceDb.connect(); input = sourceDb.getTablenames( false ); // Don't include the schema since it can cause invalid syntax } catch ( KettleDatabaseException dbe ) { new ErrorDialog( shell, "Error getting tables", "Error obtaining table list from database!", dbe ); input = null; return false; } finally { sourceDb.disconnect(); } return true; }
public boolean getInputData() { // Get some data... CopyTableWizardPage1 page1 = (CopyTableWizardPage1) getPreviousPage(); Database sourceDb = new Database( CopyTableWizard.loggingObject, page1.getSourceDatabase() ); try { sourceDb.connect(); input = sourceDb.getTablenames(); } catch ( KettleDatabaseException dbe ) { new ErrorDialog( shell, BaseMessages.getString( PKG, "CopyTableWizardPage2.ErrorGettingTables.DialogTitle" ), BaseMessages.getString( PKG, "CopyTableWizardPage2.ErrorGettingTables.DialogMessage" ), dbe ); input = null; return false; } finally { sourceDb.disconnect(); } return true; }
@Before
public void setUp() {
  // Parent job hosting the entry under test, with logging silenced.
  Job parentJob = new Job( null, new JobMeta() );
  parentJob.setStopped( false );
  parentJob.setLogLevel( LogLevel.NOTHING );

  jobEntry = spy( new JobEntryColumnsExist( "" ) );
  parentJob.getJobMeta().addJobEntry( new JobEntryCopy( jobEntry ) );
  jobEntry.setParentJob( parentJob );

  // Spied Database over a mocked connection definition.
  DatabaseMeta dbMeta = mock( DatabaseMeta.class );
  jobEntry.setDatabase( dbMeta );
  db = spy( new Database( jobEntry, dbMeta ) );

  jobEntry.setParentJob( parentJob );
  jobEntry.setTablename( TABLENAME );
  jobEntry.setArguments( COLUMNS );
  jobEntry.setSchemaname( SCHEMANAME );
}
/**
 * Execute fastload.
 *
 * @throws KettleException
 *           ...
 */
public void execute() throws KettleException {
  if ( this.meta.getTruncateTable().getValue() ) {
    Database db = new Database( this, this.meta.getDbMeta() );
    try {
      db.connect();
      db.truncateTable( this.meta.getTargetTable().getValue() );
      db.commit();
    } finally {
      // Bug fix: previously a failure in connect/truncate/commit leaked the
      // connection because disconnect() was only reached on success.
      db.disconnect();
    }
  }
  startFastLoad();
  if ( this.meta.getUseControlFile().getValue() ) {
    this.invokeLoadingControlFile();
  } else {
    this.invokeLoadingCommand();
  }
}