/**
 * Returns the repository object id of the wrapped {@code jobMeta}.
 *
 * @return the jobMeta's ObjectId, or {@code null} when no jobMeta is attached
 */
public ObjectId getObjectId() {
  // Null-safe delegation: a detached delegate simply has no id.
  return ( jobMeta == null ) ? null : jobMeta.getObjectId();
}
/**
 * Load the parameters of this job from the repository. The current ones
 * already loaded will be erased.
 *
 * @param jobMeta
 *          The target job for the parameters
 *
 * @throws KettleException
 *           Upon any error.
 */
private void loadRepParameters( JobMeta jobMeta ) throws KettleException {
  jobMeta.eraseParameters();

  // Hoist the id out of the loop: it is loop-invariant, and each of the
  // getJobParameter* calls below is a repository lookup keyed by it.
  ObjectId jobId = jobMeta.getObjectId();

  int count = countJobParameter( jobId );
  for ( int idx = 0; idx < count; idx++ ) {
    String key = getJobParameterKey( jobId, idx );
    String defValue = getJobParameterDefault( jobId, idx );
    String desc = getJobParameterDescription( jobId, idx );
    jobMeta.addParameterDefinition( key, defValue, desc );
  }
}
/**
 * Checks whether a job is already open in Spoon, matched either by its
 * repository object id or by its directory path plus name.
 *
 * @param id   repository object id to match against open jobs
 * @param path repository directory path used for the path/name match
 * @param name job name used for the path/name match
 * @return {@code true} when an open job matches by id, or by path and name
 */
private boolean isJobOpened( String id, String path, String name ) {
  List<JobMeta> openJobs = getSpoon().delegates.jobs.getJobList();
  for ( JobMeta open : openJobs ) {
    ObjectId openId = open.getObjectId();
    // Same precedence as before, parenthesized for clarity: id match OR (path AND name) match.
    if ( ( openId != null && id.equals( openId.getId() ) )
        || ( path.equals( open.getRepositoryDirectory().getPath() ) && name.equals( open.getName() ) ) ) {
      return true;
    }
  }
  return false;
}
/**
 * Adds the job graph tab, then marks the tab with the "locked" image when
 * the repository service reports an active lock on this job.
 *
 * @param jobMeta the job whose graph tab is added
 */
@Override
public void addJobGraph( JobMeta jobMeta ) {
  super.addJobGraph( jobMeta );

  TabMapEntry entry = spoon.delegates.tabs.findTabMapEntry( jobMeta );
  if ( entry == null ) {
    return; // no tab was created for this job; nothing to decorate
  }

  TabItem item = entry.getTabItem();
  try {
    ObjectId id = jobMeta.getObjectId();
    boolean locked = service != null && id != null && service.getJobLock( id ) != null;
    if ( locked ) {
      item.setImage( GUIResource.getInstance().getImageLocked() );
    }
  } catch ( Exception e ) {
    // Lock lookup failures are surfaced as unchecked, cause preserved.
    throw new RuntimeException( e );
  }
}
/**
 * Save the parameters of this job to the repository.
 *
 * @param jobMeta
 *          The job whose parameters are saved. (The previous Javadoc
 *          documented a nonexistent {@code rep} parameter.)
 *
 * @throws KettleException
 *           Upon any error.
 */
private void saveJobParameters( JobMeta jobMeta ) throws KettleException {
  String[] paramKeys = jobMeta.listParameters();
  // Loop-invariant: resolve the job's id once instead of per parameter.
  ObjectId jobId = jobMeta.getObjectId();
  for ( int idx = 0; idx < paramKeys.length; idx++ ) {
    String key = paramKeys[idx];
    String desc = jobMeta.getParameterDescription( key );
    String defValue = jobMeta.getParameterDefault( key );
    insertJobParameter( jobId, idx, key, defValue, desc );
  }
}
if ( jobMeta.getObjectId() != null ) { ObjectId newId = rep.renameJob( jobMeta.getObjectId(), newDirectory, jobMeta.getName() ); jobMeta.setObjectId( newId );
referencingObjects.add( new RepositoryObject( jobMeta.getObjectId(), jobMeta.getName(), jobMeta .getRepositoryDirectory(), null, null, RepositoryObjectType.JOB, null, false ) );
if ( jobMeta.getObjectId() == null ) { jobMeta.setObjectId( repository.connectionDelegate.getNextJobID() ); } else { repository.deleteJob( jobMeta.getObjectId() ); saveJobAttributesMap( jobMeta.getObjectId(), jobMeta.getAttributesMap() ); repository.save( slaveServer, versionComment, null, jobMeta.getObjectId(), false, overwrite ); repository.saveNotePadMeta( ni, jobMeta.getObjectId() ); if ( ni.getObjectId() != null ) { repository.insertJobNote( jobMeta.getObjectId(), ni.getObjectId() ); repository.jobEntryDelegate.saveJobEntryCopy( cge, jobMeta.getObjectId(), repository.metaStore ); if ( monitor != null ) { monitor.worked( 1 ); saveJobHopMeta( hi, jobMeta.getObjectId() ); if ( monitor != null ) { monitor.worked( 1 );
if ( jobMeta.getObjectId() != null ) { ObjectId[] noteids = repository.getJobNoteIDs( jobMeta.getObjectId() ); ObjectId[] jecids = repository.getJobEntryCopyIDs( jobMeta.getObjectId() ); ObjectId[] hopid = repository.getJobHopIDs( jobMeta.getObjectId() ); monitor.subTask( BaseMessages.getString( PKG, "JobMeta.Monitor.ReadingJobInformation" ) ); RowMetaAndData jobRow = getJob( jobMeta.getObjectId() ); jobMeta.getJobLogTable().setLogSizeLimit( getJobAttributeString( jobMeta.getObjectId(), 0, KettleDatabaseRepository.JOB_ATTRIBUTE_LOG_SIZE_LIMIT ) ); new KettleDatabaseRepositoryJobAttribute( repository.connectionDelegate, jobMeta.getObjectId() ); for ( LogTableInterface logTable : jobMeta.getLogTables() ) { logTable.loadFromRepository( attributeInterface ); jobMeta.setAttributesMap( loadJobAttributesMap( jobMeta.getObjectId() ) ); jobMeta.getObjectId(), jecids[i], jobentries, jobMeta.getDatabases(), jobMeta .getSlaveServers(), jobname );
.getObjectId() ); table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_JOB_ID_DIRECTORY ), jobMeta .getJobLogTable().isLogFieldUsed() ); repository.connectionDelegate.insertJobAttribute( jobMeta.getObjectId(), 0, KettleDatabaseRepository.JOB_ATTRIBUTE_LOG_SIZE_LIMIT, 0, jobMeta .getJobLogTable().getLogSizeLimit() ); repository.insertJobEntryDatabase( jobMeta.getObjectId(), null, jobMeta .getJobLogTable().getDatabaseMeta().getObjectId() ); new KettleDatabaseRepositoryJobAttribute( repository.connectionDelegate, jobMeta.getObjectId() ); for ( LogTableInterface logTable : jobMeta.getLogTables() ) { logTable.saveToRepository( attributeInterface );
if ( ( ( jobMeta.getName() != null && jobMeta.getObjectId() != null && spoon.rep != null ) || ( jobMeta .getFilename() != null && spoon.rep == null ) )
String jobMetaFilename = fileRep.calcFilename( jobMeta.getObjectId() ); jobMeta.setFilename( jobMetaFilename );