/**
 * Parses an XML string into a DOM Document using the supplied builder.
 *
 * @param db     the DocumentBuilder to parse with
 * @param string the XML content to parse
 * @return the parsed Document
 * @throws KettleXMLException if the string cannot be parsed
 */
public static Document loadXMLString( DocumentBuilder db, String string ) throws KettleXMLException {
  // try-with-resources closes the reader on every path; the original closed it in a
  // finally block but also double-wrapped IOExceptions (the inner KettleXMLException
  // was re-caught and re-wrapped by the outer catch). Wrap exactly once here.
  try ( StringReader stringReader = new StringReader( string ) ) {
    InputSource inputSource = new InputSource( stringReader );
    return db.parse( inputSource );
  } catch ( Exception e ) {
    throw new KettleXMLException( "Error reading information from XML string : " + Const.CR + string, e );
  }
}
/**
 * Loads this job entry's settings from its XML node.
 *
 * @throws KettleXMLException if any tag cannot be read
 */
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers, Repository rep,
  IMetaStore metaStore ) throws KettleXMLException {
  try {
    super.loadXML( entrynode, databases, slaveServers );
    foldername = XMLHandler.getTagValue( entrynode, "foldername" );
    specifywildcard = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "specify_wildcard" ) );
    wildcard = XMLHandler.getTagValue( entrynode, "wildcard" );
    wildcardexclude = XMLHandler.getTagValue( entrynode, "wildcardexclude" );
  } catch ( KettleXMLException xe ) {
    // Chain the original exception so the root cause is not lost (it was previously
    // flattened to xe.getMessage() only).
    throw new KettleXMLException( BaseMessages.getString( PKG,
      "JobEntryDeleteResultFilenames.CanNotLoadFromXML", xe.getMessage() ), xe );
  }
}
/**
 * Injection setter: rebuilds the filter condition from its XML form.
 * Best-effort by design — a malformed condition is logged, not propagated,
 * so metadata injection does not abort on a bad value.
 */
@Injection( name = "CONDITION" )
public void setConditionXML( String conditionXML ) {
  try {
    condition = new Condition( conditionXML );
  } catch ( KettleXMLException e ) {
    log.logError( e.getMessage() );
  }
}
}
/**
 * Serializes a DOM node to its XML text form, omitting the XML declaration.
 *
 * @param node the node to serialize
 * @return the XML text of the node
 * @throws KettleXMLException if the transformation fails
 */
public static String formatNode( Node node ) throws KettleXMLException {
  StringWriter writer = new StringWriter();
  try {
    Transformer transformer = TransformerFactory.newInstance().newTransformer();
    transformer.setOutputProperty( OutputKeys.OMIT_XML_DECLARATION, "yes" );
    transformer.transform( new DOMSource( node ), new StreamResult( writer ) );
  } catch ( Exception e ) {
    throw new KettleXMLException( "Unable to format Node as XML", e );
  }
  return writer.toString();
}
/**
 * Loads this job entry's folder-comparison settings from its XML node.
 *
 * @throws KettleXMLException if any tag cannot be read
 */
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers, Repository rep,
  IMetaStore metaStore ) throws KettleXMLException {
  try {
    super.loadXML( entrynode, databases, slaveServers );
    includesubfolders = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "include_subfolders" ) );
    comparefilecontent = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "compare_filecontent" ) );
    comparefilesize = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "compare_filesize" ) );
    compareonly = XMLHandler.getTagValue( entrynode, "compareonly" );
    wildcard = XMLHandler.getTagValue( entrynode, "wildcard" );
    filename1 = XMLHandler.getTagValue( entrynode, "filename1" );
    filename2 = XMLHandler.getTagValue( entrynode, "filename2" );
  } catch ( KettleXMLException xe ) {
    // Chain the original exception so the root cause is not lost (it was previously
    // flattened to xe.getMessage() only).
    throw new KettleXMLException( BaseMessages.getString( PKG, "JobFoldersCompare.Meta.UnableLoadXML",
      xe.getMessage() ), xe );
  }
}
/**
 * Loads an XML document from a File by delegating to the URL-based loader.
 *
 * @param resource the file to load
 * @return the parsed Document
 * @throws KettleXMLException if the file path is invalid or parsing fails
 */
public static Document loadXMLFile( File resource ) throws KettleXMLException {
  URL url;
  try {
    url = resource.toURI().toURL();
  } catch ( MalformedURLException e ) {
    throw new KettleXMLException( e );
  }
  return loadXMLFile( url );
}
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers, Repository rep, IMetaStore metaStore ) throws KettleXMLException { try { super.loadXML( entrynode, databases, slaveServers ); filename = XMLHandler.getTagValue( entrynode, "filename" ); Node fields = XMLHandler.getSubNode( entrynode, "fields" ); // How many field arguments? int nrFields = XMLHandler.countNodes( fields, "field" ); allocate( nrFields ); // Read them all... for ( int i = 0; i < nrFields; i++ ) { Node fnode = XMLHandler.getSubNodeByNr( fields, "field", i ); arguments[i] = XMLHandler.getTagValue( fnode, "name" ); } } catch ( KettleXMLException xe ) { throw new KettleXMLException( BaseMessages.getString( PKG, "JobEntryFilesExist.ERROR_0001_Cannot_Load_Job_Entry_From_Xml_Node", xe.getMessage() ) ); } }
/**
 * Creates a DocumentBuilder from the project's hardened factory.
 *
 * @param namespaceAware     whether the builder is XML-namespace aware
 * @param deferNodeExpansion whether Xerces deferred node expansion is enabled
 * @return a configured DocumentBuilder
 * @throws KettleXMLException if the parser cannot be configured
 */
public static DocumentBuilder createDocumentBuilder( boolean namespaceAware, boolean deferNodeExpansion )
  throws KettleXMLException {
  try {
    DocumentBuilderFactory factory = XMLParserFactoryProducer.createSecureDocBuilderFactory();
    factory.setNamespaceAware( namespaceAware );
    factory.setFeature( "http://apache.org/xml/features/dom/defer-node-expansion", deferNodeExpansion );
    return factory.newDocumentBuilder();
  } catch ( ParserConfigurationException e ) {
    throw new KettleXMLException( e );
  }
}
/**
 * Reads the help file from the classpath and parses it into the static DOM.
 *
 * @param strFileName classpath resource name of the help file
 * @throws KettleXMLException if the resource cannot be read or parsed
 */
private static void xparseXmlFile( String strFileName ) throws KettleXMLException {
  // try-with-resources closes the stream on every path; the original leaked it
  // whenever reading or parsing threw (close() was only reached on success).
  try ( InputStream is = ScriptValuesHelp.class.getResourceAsStream( strFileName ) ) {
    int c;
    StringBuilder buffer = new StringBuilder();
    while ( ( c = is.read() ) != -1 ) {
      buffer.append( (char) c );
    }
    dom = XMLHandler.loadXMLString( buffer.toString() );
  } catch ( Exception e ) {
    throw new KettleXMLException( "Unable to read script values help file from file [" + strFileName + "]", e );
  }
}
}
/**
 * Reads the help file from the classpath and parses it into the static DOM.
 *
 * @param strFileName classpath resource name of the help file
 * @throws KettleXMLException if the resource cannot be read or parsed
 */
private static void xparseXmlFile( String strFileName ) throws KettleXMLException {
  // try-with-resources closes the stream on every path; the original leaked it
  // whenever reading or parsing threw (close() was only reached on success).
  try ( InputStream is = ScriptHelp.class.getResourceAsStream( strFileName ) ) {
    int c;
    StringBuilder buffer = new StringBuilder();
    while ( ( c = is.read() ) != -1 ) {
      buffer.append( (char) c );
    }
    dom = XMLHandler.loadXMLString( buffer.toString() );
  } catch ( Exception e ) {
    throw new KettleXMLException( "Unable to read script values help file from file [" + strFileName + "]", e );
  }
}
}
/**
 * Reads this step's settings from its XML node, on top of the base-class fields.
 *
 * @throws KettleXMLException if the step info cannot be read
 */
protected void readData( Node stepnode, IMetaStore metastore ) throws KettleXMLException {
  super.readData( stepnode, metastore );
  try {
    String isCommand = XMLHandler.getTagValue( stepnode, "file", "is_command" );
    fileAsCommand = "Y".equalsIgnoreCase( isCommand );
  } catch ( Exception e ) {
    throw new KettleXMLException( "Unable to load step info from XML", e );
  }
}
/**
 * Reads this step's row-count settings from its XML node.
 *
 * @throws KettleXMLException if the step info cannot be read
 */
private void readData( Node stepnode ) throws KettleXMLException {
  try {
    String outputFlag = XMLHandler.getTagValue( stepnode, "outputRowcount" );
    outputRowcount = "Y".equalsIgnoreCase( outputFlag );
    rowcountField = XMLHandler.getTagValue( stepnode, "rowcountField" );
  } catch ( Exception e ) {
    throw new KettleXMLException( "Unable to load step info from XML", e );
  }
}
/**
 * Reads this step's result-field name from its XML node.
 *
 * @throws KettleXMLException if the step info cannot be read
 */
private void readData( Node stepnode ) throws KettleXMLException {
  try {
    resultfieldname = XMLHandler.getTagValue( stepnode, "resultfieldname" );
  } catch ( Exception e ) {
    throw new KettleXMLException(
      BaseMessages.getString( PKG, "DetectLastRowMeta.Exception.UnableToReadStepInfo" ), e );
  }
}
/**
 * Loads this job entry from its XML node; only the base-class fields apply.
 *
 * @throws KettleXMLException if the node cannot be read
 */
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers, Repository rep,
  IMetaStore metaStore ) throws KettleXMLException {
  try {
    super.loadXML( entrynode, databases, slaveServers );
  } catch ( Exception e ) {
    throw new KettleXMLException(
      BaseMessages.getString( PKG, "JobEntrySuccess.Meta.UnableToLoadFromXML" ), e );
  }
}
/**
 * Reads the shapefile and DBF filenames for this step from its XML node.
 *
 * @throws KettleXMLException if the step info cannot be read
 */
private void readData( Node stepnode ) throws KettleXMLException {
  try {
    shapeFilename = XMLHandler.getTagValue( stepnode, "shapefilename" );
    dbfFilename = XMLHandler.getTagValue( stepnode, "dbffilename" );
  } catch ( Exception e ) {
    throw new KettleXMLException( "Unable to load step info from XML", e );
  }
}
/**
 * Load a file into an XML document.
 *
 * @param filename the filename to load into a document
 * @return the parsed Document
 * @throws KettleXMLException if the file cannot be resolved or parsed
 */
public static Document loadXMLFile( String filename ) throws KettleXMLException {
  try {
    // Resolve through KettleVFS so VFS URLs (file://, zip:, etc.) work too.
    return loadXMLFile( KettleVFS.getFileObject( filename ) );
  } catch ( Exception e ) {
    throw new KettleXMLException( e );
  }
}
/**
 * Loads this 'create folder' job entry's settings from its XML node.
 *
 * @throws KettleXMLException if any tag cannot be read
 */
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers, Repository rep,
  IMetaStore metaStore ) throws KettleXMLException {
  try {
    super.loadXML( entrynode, databases, slaveServers );
    foldername = XMLHandler.getTagValue( entrynode, "foldername" );
    String failFlag = XMLHandler.getTagValue( entrynode, "fail_of_folder_exists" );
    failOfFolderExists = "Y".equalsIgnoreCase( failFlag );
  } catch ( KettleXMLException xe ) {
    throw new KettleXMLException( "Unable to load job entry of type 'create folder' from XML node", xe );
  }
}
/**
 * Reads this cube-output step's file settings from its XML node.
 *
 * @throws KettleXMLException if the step info cannot be read
 */
private void readData( Node stepnode ) throws KettleXMLException {
  try {
    filename = XMLHandler.getTagValue( stepnode, "file", "name" );
    String addToResult = XMLHandler.getTagValue( stepnode, "file", "add_to_result_filenames" );
    addToResultFilenames = "Y".equalsIgnoreCase( addToResult );
    String noOpenOnInit = XMLHandler.getTagValue( stepnode, "file", "do_not_open_newfile_init" );
    doNotOpenNewFileInit = "Y".equalsIgnoreCase( noOpenOnInit );
  } catch ( Exception e ) {
    throw new KettleXMLException(
      BaseMessages.getString( PKG, "CubeOutputMeta.Exception.UnableToLoadStepInfo" ), e );
  }
}
/**
 * Loads this step's value metadata from its XML node.
 *
 * @throws KettleXMLException if the node cannot be read
 */
@Override
public void loadXML( Node stepnode, List<DatabaseMeta> databases, Map<String, Counter> counters )
  throws KettleXMLException {
  try {
    value = new ValueMetaAndData();

    Node valnode = XMLHandler.getSubNode( stepnode, "values", "value" );
    if ( valnode != null ) {
      // NOTE(review): removed a stray System.out.println debug statement that
      // printed the node on every load.
      value.loadXML( valnode );
    }
  } catch ( Exception e ) {
    throw new KettleXMLException( "Unable to read step info from XML node", e );
  }
}
/**
 * Loads this 'create file' job entry's settings from its XML node.
 *
 * @throws KettleXMLException if any tag cannot be read
 */
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers, Repository rep,
  IMetaStore metaStore ) throws KettleXMLException {
  try {
    super.loadXML( entrynode, databases, slaveServers );
    filename = XMLHandler.getTagValue( entrynode, "filename" );
    String failFlag = XMLHandler.getTagValue( entrynode, "fail_if_file_exists" );
    failIfFileExists = "Y".equalsIgnoreCase( failFlag );
    String addFlag = XMLHandler.getTagValue( entrynode, "add_filename_result" );
    addfilenameresult = "Y".equalsIgnoreCase( addFlag );
  } catch ( KettleXMLException xe ) {
    throw new KettleXMLException( "Unable to load job entry of type 'create file' from XML node", xe );
  }
}