// Counts how many values of the named job-entry attribute are stored for the given object id.
// Pure pass-through: delegates directly to the wrapped repository implementation.
@Override public int countNrJobEntryAttributes( ObjectId objectId, String s ) throws KettleException { return getDelegate().countNrJobEntryAttributes( objectId, s ); }
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers ) throws KettleException { try { argFromPrevious = rep.getJobEntryAttributeBoolean( id_jobentry, "arg_from_previous" ); includeSubfolders = rep.getJobEntryAttributeBoolean( id_jobentry, "include_subfolders" ); // How many arguments? int argnr = rep.countNrJobEntryAttributes( id_jobentry, "name" ); arguments = new String[argnr]; filemasks = new String[argnr]; // Read them all... for ( int a = 0; a < argnr; a++ ) { arguments[a] = rep.getJobEntryAttributeString( id_jobentry, a, "name" ); filemasks[a] = rep.getJobEntryAttributeString( id_jobentry, a, "filemask" ); } } catch ( KettleException dbe ) { throw new KettleException( BaseMessages.getString( PKG, "JobEntryCheckFilesLocked.UnableToLoadFromRepo", String.valueOf( id_jobentry ) ), dbe ); } }
/**
 * Loads this job entry's configuration (flags plus the name/filemask rows) from the repository.
 * Any repository error is rethrown as a KettleException carrying a localized message.
 */
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    argFromPrevious = rep.getJobEntryAttributeBoolean( id_jobentry, "arg_from_previous" );
    includeSubfolders = rep.getJobEntryAttributeBoolean( id_jobentry, "include_subfolders" );

    // The count of stored "name" attributes tells us how many rows to allocate.
    int rowCount = rep.countNrJobEntryAttributes( id_jobentry, "name" );
    allocate( rowCount );

    // Fill each row with its file name and wildcard mask.
    for ( int idx = 0; idx < rowCount; idx++ ) {
      arguments[idx] = rep.getJobEntryAttributeString( id_jobentry, idx, "name" );
      filemasks[idx] = rep.getJobEntryAttributeString( id_jobentry, idx, "filemask" );
    }
  } catch ( KettleException dbe ) {
    throw new KettleException( BaseMessages.getString( PKG, "JobEntryDeleteFiles.UnableToLoadFromRepo", String
      .valueOf( id_jobentry ) ), dbe );
  }
}
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers ) throws KettleException { try { argFromPrevious = rep.getJobEntryAttributeBoolean( id_jobentry, "arg_from_previous" ); limit_folders = rep.getJobEntryAttributeString( id_jobentry, "limit_folders" ); success_condition = rep.getJobEntryAttributeString( id_jobentry, "success_condition" ); // How many arguments? int argnr = rep.countNrJobEntryAttributes( id_jobentry, "name" ); allocate( argnr ); // Read them all... for ( int a = 0; a < argnr; a++ ) { arguments[a] = rep.getJobEntryAttributeString( id_jobentry, a, "name" ); } } catch ( KettleException dbe ) { throw new KettleException( BaseMessages.getString( PKG, "JobEntryDeleteFolders.UnableToLoadFromRepo", String .valueOf( id_jobentry ) ), dbe ); } }
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers ) throws KettleException { try { argFromPrevious = rep.getJobEntryAttributeBoolean( id_jobentry, "arg_from_previous" ); includeSubfolders = rep.getJobEntryAttributeBoolean( id_jobentry, "include_subfolders" ); deleteallbefore = rep.getJobEntryAttributeBoolean( id_jobentry, "delete_all_before" ); // How many arguments? int argnr = rep.countNrJobEntryAttributes( id_jobentry, "name" ); arguments = new String[argnr]; filemasks = new String[argnr]; // Read them all... for ( int a = 0; a < argnr; a++ ) { arguments[a] = rep.getJobEntryAttributeString( id_jobentry, a, "name" ); filemasks[a] = rep.getJobEntryAttributeString( id_jobentry, a, "filemask" ); } } catch ( KettleException dbe ) { throw new KettleException( BaseMessages.getString( PKG, "JobEntryAddResultFilenames.UnableToLoadFromRepo", String.valueOf( id_jobentry ) ), dbe ); } }
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers ) throws KettleException { try { filename = rep.getJobEntryAttributeString( id_jobentry, "filename" ); // How many arguments? int argnr = rep.countNrJobEntryAttributes( id_jobentry, "name" ); allocate( argnr ); // Read them all... for ( int a = 0; a < argnr; a++ ) { arguments[a] = rep.getJobEntryAttributeString( id_jobentry, a, "name" ); } } catch ( KettleException dbe ) { throw new KettleException( BaseMessages .getString( PKG, "JobEntryFilesExist.ERROR_0002_Cannot_Load_Job_From_Repository", "" + id_jobentry, dbe .getMessage() ) ); } }
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers ) throws KettleException { try { tablename = rep.getJobEntryAttributeString( id_jobentry, "tablename" ); schemaname = rep.getJobEntryAttributeString( id_jobentry, "schemaname" ); connection = rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, "connection", "id_database", databases ); // How many arguments? int argnr = rep.countNrJobEntryAttributes( id_jobentry, "name" ); arguments = new String[argnr]; // Read them all... for ( int a = 0; a < argnr; a++ ) { arguments[a] = rep.getJobEntryAttributeString( id_jobentry, a, "name" ); } } catch ( KettleDatabaseException dbe ) { throw new KettleException( BaseMessages.getString( PKG, "JobEntryColumnsExist.Meta.UnableLoadRep", "" + id_jobentry ), dbe ); } }
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers ) throws KettleException { try { connection = rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, "connection", "id_database", databases ); this.argFromPrevious = rep.getJobEntryAttributeBoolean( id_jobentry, "arg_from_previous" ); // How many arguments? int argnr = rep.countNrJobEntryAttributes( id_jobentry, "name" ); allocate( argnr ); // Read them all... for ( int a = 0; a < argnr; a++ ) { this.arguments[a] = rep.getJobEntryAttributeString( id_jobentry, a, "name" ); this.schemaname[a] = rep.getJobEntryAttributeString( id_jobentry, a, "schemaname" ); } } catch ( KettleDatabaseException dbe ) { throw new KettleException( BaseMessages.getString( PKG, "JobEntryTruncateTables.UnableLoadRep", "" + id_jobentry ), dbe ); } }
/**
 * Restores the XSLT job entry from the repository: file names, behavior flags,
 * the transformer factory choice (defaulting to JAXP when unset), plus the
 * parameter and output-property rows.
 */
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    xmlfilename = rep.getJobEntryAttributeString( id_jobentry, "xmlfilename" );
    xslfilename = rep.getJobEntryAttributeString( id_jobentry, "xslfilename" );
    outputfilename = rep.getJobEntryAttributeString( id_jobentry, "outputfilename" );
    iffileexists = (int) rep.getJobEntryAttributeInteger( id_jobentry, "iffileexists" );
    addfiletoresult = rep.getJobEntryAttributeBoolean( id_jobentry, "addfiletoresult" );
    filenamesfromprevious = rep.getJobEntryAttributeBoolean( id_jobentry, "filenamesfromprevious" );
    xsltfactory = rep.getJobEntryAttributeString( id_jobentry, "xsltfactory" );
    if ( xsltfactory == null ) {
      // Older repositories may not have the attribute: fall back to the JAXP factory.
      xsltfactory = FACTORY_JAXP;
    }

    // Parameters and output properties are stored as two independent row sets.
    int paramCount = rep.countNrJobEntryAttributes( id_jobentry, "param_name" );
    int propCount = rep.countNrJobEntryAttributes( id_jobentry, "output_property_name" );
    allocate( paramCount, propCount );

    for ( int row = 0; row < paramCount; row++ ) {
      parameterField[row] = rep.getJobEntryAttributeString( id_jobentry, row, "param_field" );
      parameterName[row] = rep.getJobEntryAttributeString( id_jobentry, row, "param_name" );
    }
    for ( int row = 0; row < propCount; row++ ) {
      outputPropertyName[row] = rep.getJobEntryAttributeString( id_jobentry, row, "output_property_name" );
      outputPropertyValue[row] = rep.getJobEntryAttributeString( id_jobentry, row, "output_property_value" );
    }
  } catch ( KettleException dbe ) {
    throw new KettleException(
      "Unable to load job entry of type 'xslt' from the repository for id_jobentry=" + id_jobentry, dbe );
  }
}
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers ) throws KettleException { try { // How many connections? int argnr = rep.countNrJobEntryAttributes( id_jobentry, "id_database" ); connections = new DatabaseMeta[argnr]; waitfors = new String[argnr]; waittimes = new int[argnr]; // Read them all... for ( int a = 0; a < argnr; a++ ) { connections[a] = rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, "connection", a, "id_database", databases ); waitfors[a] = rep.getJobEntryAttributeString( id_jobentry, a, "waitfor" ); waittimes[a] = getWaitByCode( Const.NVL( rep.getJobEntryAttributeString( id_jobentry, a, "waittime" ), "" ) ); } } catch ( KettleException dbe ) { throw new KettleException( BaseMessages.getString( PKG, "JobEntryCheckDbConnections.ERROR_0002_Cannot_Load_Job_From_Repository", "" + id_jobentry, dbe .getMessage() ) ); } }
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers ) throws KettleException { try { replaceVars = rep.getJobEntryAttributeBoolean( id_jobentry, "replacevars" ); filename = rep.getJobEntryAttributeString( id_jobentry, "filename" ); fileVariableType = getVariableType( rep.getJobEntryAttributeString( id_jobentry, "file_variable_type" ) ); // How many variableName? int argnr = rep.countNrJobEntryAttributes( id_jobentry, "variable_name" ); allocate( argnr ); // Read them all... for ( int a = 0; a < argnr; a++ ) { variableName[a] = rep.getJobEntryAttributeString( id_jobentry, a, "variable_name" ); variableValue[a] = rep.getJobEntryAttributeString( id_jobentry, a, "variable_value" ); variableType[a] = getVariableType( rep.getJobEntryAttributeString( id_jobentry, a, "variable_type" ) ); } } catch ( KettleException dbe ) { throw new KettleException( BaseMessages.getString( PKG, "JobEntrySetVariables.Meta.UnableLoadRep", String .valueOf( id_jobentry ), dbe.getMessage() ), dbe ); } }
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers ) throws KettleException { try { arg_from_previous = rep.getJobEntryAttributeBoolean( id_jobentry, "arg_from_previous" ); include_subfolders = rep.getJobEntryAttributeBoolean( id_jobentry, "include_subfolders" ); nr_errors_less_than = rep.getJobEntryAttributeString( id_jobentry, "nr_errors_less_than" ); success_condition = rep.getJobEntryAttributeString( id_jobentry, "success_condition" ); resultfilenames = rep.getJobEntryAttributeString( id_jobentry, "resultfilenames" ); // How many arguments? int argnr = rep.countNrJobEntryAttributes( id_jobentry, "source_filefolder" ); source_filefolder = new String[argnr]; wildcard = new String[argnr]; // Read them all... for ( int a = 0; a < argnr; a++ ) { source_filefolder[a] = rep.getJobEntryAttributeString( id_jobentry, a, "source_filefolder" ); wildcard[a] = rep.getJobEntryAttributeString( id_jobentry, a, "wildcard" ); } } catch ( KettleException dbe ) { throw new KettleException( BaseMessages.getString( PKG, "JobXMLWellFormed.Error.Exception.UnableLoadRep" ) + id_jobentry, dbe ); } }
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers ) throws KettleException { try { arg_from_previous = rep.getJobEntryAttributeBoolean( id_jobentry, "arg_from_previous" ); include_subfolders = rep.getJobEntryAttributeBoolean( id_jobentry, "include_subfolders" ); nr_errors_less_than = rep.getJobEntryAttributeString( id_jobentry, "nr_errors_less_than" ); success_condition = rep.getJobEntryAttributeString( id_jobentry, "success_condition" ); resultfilenames = rep.getJobEntryAttributeString( id_jobentry, "resultfilenames" ); // How many arguments? int argnr = rep.countNrJobEntryAttributes( id_jobentry, "source_filefolder" ); allocate( argnr ); // Read them all... for ( int a = 0; a < argnr; a++ ) { source_filefolder[a] = rep.getJobEntryAttributeString( id_jobentry, a, "source_filefolder" ); wildcard[a] = rep.getJobEntryAttributeString( id_jobentry, a, "wildcard" ); conversionTypes[a] = getConversionTypeByCode( Const.NVL( rep.getJobEntryAttributeString( id_jobentry, "ConversionType" ), "" ) ); } } catch ( KettleException dbe ) { throw new KettleException( BaseMessages.getString( PKG, "JobDosToUnix.Error.Exception.UnableLoadRep" ) + id_jobentry, dbe ); } }
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers ) throws KettleException { try { include_subfolders = rep.getJobEntryAttributeBoolean( id_jobentry, "include_subfolders" ); add_result_filenames = rep.getJobEntryAttributeBoolean( id_jobentry, "add_result_filenames" ); is_args_from_previous = rep.getJobEntryAttributeBoolean( id_jobentry, "is_args_from_previous" ); limit = rep.getJobEntryAttributeString( id_jobentry, "limit" ); success_condition = rep.getJobEntryAttributeString( id_jobentry, "success_condition" ); // How many arguments? int argnr = rep.countNrJobEntryAttributes( id_jobentry, "source_filefolder" ); source_filefolder = new String[argnr]; source_wildcard = new String[argnr]; delimiter = new String[argnr]; target_Db = new String[argnr]; target_table = new String[argnr]; // Read them all... for ( int a = 0; a < argnr; a++ ) { source_filefolder[a] = rep.getJobEntryAttributeString( id_jobentry, a, "source_filefolder" ); source_wildcard[a] = rep.getJobEntryAttributeString( id_jobentry, a, "source_wildcard" ); delimiter[a] = rep.getJobEntryAttributeString( id_jobentry, a, "delimiter" ); target_Db[a] = rep.getJobEntryAttributeString( id_jobentry, a, "target_db" ); target_table[a] = rep.getJobEntryAttributeString( id_jobentry, a, "target_table" ); } } catch ( KettleException dbe ) { throw new KettleException( BaseMessages.getString( PKG, "JobEntryMSAccessBulkLoad.Meta.UnableLoadRep", "" + id_jobentry, dbe.getMessage() ), dbe ); } }
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers ) throws KettleException { try { copy_empty_folders = rep.getJobEntryAttributeBoolean( id_jobentry, "copy_empty_folders" ); arg_from_previous = rep.getJobEntryAttributeBoolean( id_jobentry, "arg_from_previous" ); overwrite_files = rep.getJobEntryAttributeBoolean( id_jobentry, "overwrite_files" ); include_subfolders = rep.getJobEntryAttributeBoolean( id_jobentry, "include_subfolders" ); remove_source_files = rep.getJobEntryAttributeBoolean( id_jobentry, "remove_source_files" ); add_result_filesname = rep.getJobEntryAttributeBoolean( id_jobentry, "add_result_filesname" ); destination_is_a_file = rep.getJobEntryAttributeBoolean( id_jobentry, "destination_is_a_file" ); create_destination_folder = rep.getJobEntryAttributeBoolean( id_jobentry, "create_destination_folder" ); // How many arguments? int argnr = rep.countNrJobEntryAttributes( id_jobentry, "source_filefolder" ); allocate( argnr ); // Read them all... for ( int a = 0; a < argnr; a++ ) { source_filefolder[a] = loadSourceRep( rep, id_jobentry, a ); destination_filefolder[a] = loadDestinationRep( rep, id_jobentry, a ); wildcard[a] = rep.getJobEntryAttributeString( id_jobentry, a, "wildcard" ); } } catch ( KettleException dbe ) { throw new KettleException( BaseMessages.getString( PKG, "JobCopyFiles.Error.Exception.UnableLoadRep" ) + id_jobentry, dbe ); } }
// Each stored "header_name" attribute corresponds to one row; size the arrays from the count.
// NOTE(review): fragment of a larger method — the enclosing definition is not visible here.
int argnr = rep.countNrJobEntryAttributes( id_jobentry, "header_name" ); allocate( argnr );
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers ) throws KettleException { try { setFileName( rep.getJobEntryAttributeString( id_jobentry, "file_name" ) ); setWorkDirectory( rep.getJobEntryAttributeString( id_jobentry, "work_directory" ) ); argFromPrevious = rep.getJobEntryAttributeBoolean( id_jobentry, "arg_from_previous" ); execPerRow = rep.getJobEntryAttributeBoolean( id_jobentry, "exec_per_row" ); setLogfile = rep.getJobEntryAttributeBoolean( id_jobentry, "set_logfile" ); setAppendLogfile = rep.getJobEntryAttributeBoolean( id_jobentry, "set_append_logfile" ); addDate = rep.getJobEntryAttributeBoolean( id_jobentry, "add_date" ); addTime = rep.getJobEntryAttributeBoolean( id_jobentry, "add_time" ); logfile = rep.getJobEntryAttributeString( id_jobentry, "logfile" ); logext = rep.getJobEntryAttributeString( id_jobentry, "logext" ); logFileLevel = LogLevel.getLogLevelForCode( rep.getJobEntryAttributeString( id_jobentry, "loglevel" ) ); insertScript = rep.getJobEntryAttributeBoolean( id_jobentry, "insertScript" ); script = rep.getJobEntryAttributeString( id_jobentry, "script" ); // How many arguments? int argnr = rep.countNrJobEntryAttributes( id_jobentry, "argument" ); allocate( argnr ); // Read them all... for ( int a = 0; a < argnr; a++ ) { arguments[a] = rep.getJobEntryAttributeString( id_jobentry, a, "argument" ); } } catch ( KettleDatabaseException dbe ) { throw new KettleException( "Unable to load job entry of type 'shell' from the repository with id_jobentry=" + id_jobentry, dbe ); } }
// Each stored "source_filefolder" attribute corresponds to one row; size the arrays from the count.
// NOTE(review): fragment of a larger method — the try block's close is not visible here.
try { int argnr = rep.countNrJobEntryAttributes( id_jobentry, "source_filefolder" ); allocate( argnr );
// Each stored "source_filefolder" attribute corresponds to one row; size the arrays from the count.
// NOTE(review): fragment of a larger method — the enclosing definition is not visible here.
int argnr = rep.countNrJobEntryAttributes( id_jobentry, "source_filefolder" ); allocate( argnr );
// Each stored "source_filefolder" attribute corresponds to one row; size the arrays from the count.
// NOTE(review): fragment of a larger method — the enclosing definition is not visible here.
int argnr = rep.countNrJobEntryAttributes( id_jobentry, "source_filefolder" ); allocate( argnr );