/**
 * Adds the JSON Output step's configuration onto the step node:
 * append/servlet/result flags, the JSON bloc name, the operation type and,
 * when configured, the output-value field name.
 */
@Override protected void customAnalyze( final JsonOutputMeta meta, final IMetaverseNode rootNode )
  throws MetaverseAnalyzerException {
  // Common step analysis first, then the step-specific settings.
  super.customAnalyze( meta, rootNode );

  rootNode.setProperty( "isFileAppended", meta.isFileAppended() );
  rootNode.setProperty( "passDataToServletOutput", meta.passDataToServletOutput() );
  rootNode.setProperty( "addToResult", meta.AddToResult() );
  rootNode.setProperty( "jsonBloc", meta.getJsonBloc() );
  rootNode.setProperty( "operationType", meta.getOperationType() );

  // Only record the output value when one is actually configured.
  final String outputValue = meta.getOutputValue();
  if ( !StringUtils.isBlank( outputValue ) ) {
    rootNode.setProperty( "outputValue", outputValue );
  }
}
/**
 * Adds the XML Output step's document layout settings (parent and repeating
 * element names) onto the step node.
 */
@Override protected void customAnalyze( XMLOutputMeta meta, IMetaverseNode node )
  throws MetaverseAnalyzerException {
  // Base analysis, then the two XML structure properties.
  super.customAnalyze( meta, node );
  node.setProperty( "parentnode", meta.getMainElement() );
  node.setProperty( "rownode", meta.getRepeatElement() );
}
/**
 * Adds the JSON Input step's source configuration onto the step node.
 * When the step accepts file names from an incoming field, the field-related
 * flags are recorded; otherwise the configured file-name field is recorded.
 */
@Override protected void customAnalyze( final JsonInputMeta meta, final IMetaverseNode rootNode )
  throws MetaverseAnalyzerException {
  super.customAnalyze( meta, rootNode );

  if ( !meta.isAcceptingFilenames() ) {
    // File locations are configured directly on the step.
    rootNode.setProperty( "fileDirName", meta.getFilenameField() );
    return;
  }

  // File names arrive via a field from a previous step.
  rootNode.setProperty( "sourceField", meta.getAcceptingField() );
  rootNode.setProperty( "sourceFieldIsFile", meta.getIsAFile() );
  rootNode.setProperty( "sourceFieldIsUrl", meta.isReadUrl() );
  rootNode.setProperty( "removeSourceField", meta.isRemoveSourceField() );
}
@Override protected void customAnalyze( GetXMLDataMeta meta, IMetaverseNode node ) throws MetaverseAnalyzerException { super.customAnalyze( meta, node ); // Add the XPath Loop to the step node node.setProperty( "loopXPath", meta.getLoopXPath() ); }
/**
 * Adds the ETL Metadata Injection step's configuration onto the step node,
 * with all variable references resolved against the parent transformation,
 * then triggers the sub-transformation analysis.
 */
@Override protected void customAnalyze( MetaInjectMeta meta, IMetaverseNode rootNode )
  throws MetaverseAnalyzerException {
  // Resolve ${...} variables before recording each value.
  final String resolvedSourceStep = parentTransMeta.environmentSubstitute( meta.getSourceStepName() );
  rootNode.setProperty( "sourceStepName", resolvedSourceStep );
  rootNode.setProperty( "targetFile", parentTransMeta.environmentSubstitute( meta.getTargetFile() ) );
  rootNode.setProperty( "streamSourceStepname",
    parentTransMeta.environmentSubstitute( meta.getStreamSourceStepname() ) );
  rootNode.setProperty( "streamTargetStepname",
    parentTransMeta.environmentSubstitute( meta.getStreamTargetStepname() ) );
  // "No execution" in the meta is inverted into a positive flag on the node.
  rootNode.setProperty( "runResultingTransformation", !meta.isNoExecution() );

  // Analyze the injected (sub-)transformation as well.
  KettleAnalyzerUtil.analyze( this, parentTransMeta, meta, rootNode );
}
@Override protected void customAnalyze( final JmsConsumerMeta meta, final IMetaverseNode rootNode ) throws MetaverseAnalyzerException { // TODO: When/If adding JmsProducerAnalyzer, move common code to new base class. final JmsDelegate jmsDelegate = meta.getJmsDelegate(); rootNode.setProperty( "batchSize", parentTransMeta.environmentSubstitute( meta.getBatchSize() ) ); rootNode.setProperty( "batchDuration", parentTransMeta.environmentSubstitute( meta.getBatchDuration() ) ); rootNode.setProperty( "connectionType", jmsDelegate.getConnectionType() ); rootNode.setProperty( "connectionUrl", parentTransMeta.environmentSubstitute( jmsDelegate.getConnectionUrl() ) ); rootNode.setProperty( "destinationType", parentTransMeta.environmentSubstitute( jmsDelegate.getDestinationType() ) ); rootNode.setProperty( "destinationName", parentTransMeta.environmentSubstitute( jmsDelegate.getDestinationName() ) ); rootNode.setProperty( "receiveTimeout", parentTransMeta.environmentSubstitute( meta.getReceiveTimeout() ) ); KettleAnalyzerUtil.analyze( this, parentTransMeta, meta, rootNode ); }
/**
 * Creates the standard output-field node, then decorates it with the XML
 * extraction details (XPath, element type, result type, repeat flag) of the
 * matching Get XML Data input field, if one exists with the same name.
 */
@Override protected IMetaverseNode createOutputFieldNode( IAnalysisContext context, ValueMetaInterface fieldMeta,
                                                          String targetStepName, String nodeType ) {
  final IMetaverseNode fieldNode = super.createOutputFieldNode( context, fieldMeta, targetStepName, nodeType );
  for ( final GetXMLDataField inputField : baseStepMeta.getInputFields() ) {
    if ( !fieldMeta.getName().equals( inputField.getName() ) ) {
      continue;
    }
    // Found the matching input field: copy its XML-specific attributes.
    fieldNode.setProperty( "xpath", Const.NVL( inputField.getXPath(), "" ) );
    fieldNode.setProperty( "element", Const.NVL( inputField.getElementTypeCode(), "" ) );
    fieldNode.setProperty( "resultType", Const.NVL( inputField.getResultTypeCode(), "" ) );
    fieldNode.setProperty( "repeat", inputField.isRepeated() );
    break;
  }
  return fieldNode;
}
@Override protected void customAnalyze( KafkaConsumerInputMeta meta, IMetaverseNode rootNode ) throws MetaverseAnalyzerException { // add any custom properties or relationships here rootNode.setProperty( "do_nothing", true ); }
// NOTE(review): this is a fragment of a text-file-input style customAnalyze —
// the enclosing method signature and the closing braces lie outside this view,
// so the braces below are intentionally unbalanced here.
super.customAnalyze( meta, rootNode );
if ( meta.isAcceptingFilenames() ) {
  // File names are supplied by a previous step/field rather than configured directly.
  rootNode.setProperty( "fileNameStep", meta.getAcceptingStepName() );
  rootNode.setProperty( "fileNameField", meta.getAcceptingField() );
  rootNode.setProperty( "passingThruFields", meta.inputFiles.passingThruFields );
  // NOTE(review): the content/layout settings below are nested inside the
  // isAcceptingFilenames() branch — confirm they shouldn't apply unconditionally.
  rootNode.setProperty( "fileType", meta.content.fileType );
  rootNode.setProperty( "separator", meta.content.separator );
  rootNode.setProperty( "enclosure", meta.content.enclosure );
  rootNode.setProperty( "breakInEnclosureAllowed", meta.content.breakInEnclosureAllowed );
  rootNode.setProperty( "escapeCharacter", meta.content.escapeCharacter );
  if ( meta.content.header ) {
    rootNode.setProperty( "nrHeaderLines", meta.content.nrHeaderLines );
    // NOTE(review): footer/wrap/page/compression settings are only recorded when
    // a header is present — verify this gating is intentional and not a
    // collapsed-brace artifact.
    rootNode.setProperty( "nrFooterLines", meta.content.nrFooterLines );
    rootNode.setProperty( "nrWraps", meta.content.nrWraps );
    rootNode.setProperty( "nrLinesPerPage", meta.content.nrLinesPerPage );
    rootNode.setProperty( "nrLinesDocHeader", meta.content.nrLinesDocHeader );
    rootNode.setProperty( "fileCompression", meta.content.fileCompression );
    rootNode.setProperty( "noEmptyLines", meta.content.noEmptyLines );
    rootNode.setProperty( "includeFilename", meta.content.includeFilename );
    if ( meta.content.includeFilename ) {
      rootNode.setProperty( "filenameField", meta.content.filenameField );
      rootNode.setProperty( "includeRowNumber", meta.content.includeRowNumber );
      // NOTE(review): this duplicates the includeFilename check just above and the
      // fragment is cut off here — compare against the original analyzer source.
      if ( meta.content.includeFilename ) {
@Override public IMetaverseNode createResourceNode( final M meta, final IExternalResourceInfo resource ) throws MetaverseException { IMetaverseNode resourceNode = null; if ( meta instanceof HadoopFileMeta ) { resourceNode = createResourceNode( resource ); final HadoopFileMeta hMeta = (HadoopFileMeta) meta; final String hostName = hMeta.getUrlHostName( resource.getName() ); if ( StringUtils.isNotBlank( hostName ) ) { resourceNode.setProperty( DictionaryConst.PROPERTY_HOST_NAME, hostName ); // update the default "File" type to "HDFS File" resourceNode.setProperty( DictionaryConst.PROPERTY_TYPE, DictionaryConst.NODE_TYPE_FILE ); final String clusterName = hMeta.getClusterName( resource.getName() ); if ( StringUtils.isNotBlank( clusterName ) ) { resourceNode.setProperty( DictionaryConst.PROPERTY_CLUSTER, clusterName ); } } } return resourceNode; } }
// NOTE(review): this is a fragment of a text-file-output style customAnalyze —
// the method signature begins before this view and the closing braces follow
// after it, so the braces below are intentionally unbalanced here.
throws MetaverseAnalyzerException {
  super.customAnalyze( meta, rootNode );
  rootNode.setProperty( "createParentFolder", meta.isCreateParentFolder() );
  rootNode.setProperty( "doNotOpenNewFileInit", meta.isDoNotOpenNewFileInit() );
  if ( meta.isFileNameInField() ) {
    rootNode.setProperty( "fileNameField", meta.getFileNameField() );
    // NOTE(review): the extension/numbering/date flags below are nested inside the
    // isFileNameInField() branch — confirm they shouldn't apply unconditionally
    // (possible collapsed-brace artifact).
    rootNode.setProperty( "extension", meta.getExtension() );
    rootNode.setProperty( "stepNrInFilename", meta.isStepNrInFilename() );
    rootNode.setProperty( "partNrInFilename", meta.isPartNrInFilename() );
    rootNode.setProperty( "dateInFilename", meta.isDateInFilename() );
    rootNode.setProperty( "timeInFilename", meta.isTimeInFilename() );
    if ( meta.isSpecifyingFormat() ) {
      rootNode.setProperty( "dateTimeFormat", meta.getDateTimeFormat() );
      // NOTE(review): everything from here on (layout, compression, encoding,
      // padding, split/ending settings) is gated on isSpecifyingFormat() — verify
      // against the original analyzer; these look like general output properties.
      rootNode.setProperty( "addFilenamesToResult", meta.isAddToResultFiles() );
      rootNode.setProperty( "append", meta.isFileAppended() );
      rootNode.setProperty( "separator", meta.getSeparator() );
      rootNode.setProperty( "enclosure", meta.getEnclosure() );
      rootNode.setProperty( "forceEnclosure", meta.isEnclosureForced() );
      rootNode.setProperty( "addHeader", meta.isHeaderEnabled() );
      rootNode.setProperty( "addFooter", meta.isFooterEnabled() );
      rootNode.setProperty( "fileFormat", meta.getFileFormat() );
      rootNode.setProperty( "fileCompression", meta.getFileCompression() );
      rootNode.setProperty( "encoding", meta.getEncoding() );
      rootNode.setProperty( "rightPadFields", meta.isPadded() );
      rootNode.setProperty( "fastDataDump", meta.isFastDump() );
      rootNode.setProperty( "splitEveryRows", meta.getSplitEveryRows() );
      rootNode.setProperty( "endingLine", meta.getEndedLine() );
@Override protected void customAnalyze( JobEntrySparkSubmit entry, IMetaverseNode rootNode ) throws MetaverseAnalyzerException { // -- Common properties rootNode.setProperty( ARGUMENTS, entry.environmentSubstitute( entry.getArgs() ) ); rootNode.setProperty( EXEC_MEMORY, entry.environmentSubstitute( entry.getExecutorMemory() ) ); rootNode.setProperty( DRIVER_MEMORY, entry.environmentSubstitute( entry.getDriverMemory() ) ); rootNode.setProperty( MASTER_URL, entry.environmentSubstitute( entry.getMaster() ) ); if ( JobEntrySparkSubmit.JOB_TYPE_JAVA_SCALA.equals( entry.getJobType() ) ) { // --- Java / Scala properties rootNode.setProperty( CLASS_NAME, entry.environmentSubstitute( entry.getClassName() ) ); if ( StringUtils.isNotBlank( entry.getJar() ) ) { rootNode.setProperty( MetaverseAnalyzers.JobEntrySparkSubmitAnalyzer.APPLICATION_JAR, normalizePath( entry.environmentSubstitute( entry.getJar() ) ) ); } } else if ( JobEntrySparkSubmit.JOB_TYPE_PYTHON.equals( entry.getJobType() ) ) { // Python properties if ( StringUtils.isNotBlank( entry.getPyFile() ) ) { rootNode.setProperty( MetaverseAnalyzers.JobEntrySparkSubmitAnalyzer.APPLICATION_JAR, normalizePath( entry.environmentSubstitute( entry.getPyFile() ) ) ); } } }