@Override protected void customAnalyze( GetXMLDataMeta meta, IMetaverseNode node ) throws MetaverseAnalyzerException { super.customAnalyze( meta, node ); // Add the XPath Loop to the step node node.setProperty( "loopXPath", meta.getLoopXPath() ); }
/**
 * Adds ETL-Metadata-Injection-specific properties to the root step node.
 * All user-entered values are passed through environmentSubstitute so that
 * Kettle variables are resolved before being recorded, then the injected
 * sub-transformation is analyzed.
 */
@Override
protected void customAnalyze( MetaInjectMeta meta, IMetaverseNode rootNode ) throws MetaverseAnalyzerException {
  final String sourceStepName = parentTransMeta.environmentSubstitute( meta.getSourceStepName() );
  rootNode.setProperty( "sourceStepName", sourceStepName );
  rootNode.setProperty( "targetFile",
    parentTransMeta.environmentSubstitute( meta.getTargetFile() ) );
  rootNode.setProperty( "streamSourceStepname",
    parentTransMeta.environmentSubstitute( meta.getStreamSourceStepname() ) );
  rootNode.setProperty( "streamTargetStepname",
    parentTransMeta.environmentSubstitute( meta.getStreamTargetStepname() ) );
  // "No execution" in the meta is inverted into a positive flag on the node.
  rootNode.setProperty( "runResultingTransformation", !meta.isNoExecution() );
  // Descend into the injected sub-transformation for lineage.
  KettleAnalyzerUtil.analyze( this, parentTransMeta, meta, rootNode );
}
/**
 * Returns the incoming fields this JSON Input step consumes.
 * Only when the step reads its source from an incoming field
 * (isAcceptingFilenames with a non-empty accepting field) does it use any
 * upstream field; otherwise the set is empty.
 */
@Override
protected Set<StepField> getUsedFields( final JsonInputMeta meta ) {
  final Set<StepField> usedFields = new HashSet<>();
  final String acceptingField = meta.getAcceptingField();
  if ( meta.isAcceptingFilenames() && StringUtils.isNotEmpty( acceptingField ) ) {
    // The accepting field may be produced by any of several upstream steps.
    for ( final String inputStepName : getInputStepNames( meta, acceptingField ) ) {
      usedFields.add( new StepField( inputStepName, acceptingField ) );
    }
  }
  return usedFields;
}
@Override protected void customAnalyze( KafkaConsumerInputMeta meta, IMetaverseNode rootNode ) throws MetaverseAnalyzerException { // add any custom properties or relationships here rootNode.setProperty( "do_nothing", true ); }
/**
 * Builds the output-field node via the base class, then enriches it with
 * XML-specific attributes (xpath, element type, result type, repeat flag)
 * taken from the matching declared input field, if one exists.
 */
@Override
protected IMetaverseNode createOutputFieldNode( IAnalysisContext context, ValueMetaInterface fieldMeta,
                                                String targetStepName, String nodeType ) {
  final IMetaverseNode fieldNode = super.createOutputFieldNode( context, fieldMeta, targetStepName, nodeType );
  for ( final GetXMLDataField field : baseStepMeta.getInputFields() ) {
    if ( !fieldMeta.getName().equals( field.getName() ) ) {
      continue;
    }
    // Found the declared field that produced this output; copy its XML metadata.
    fieldNode.setProperty( "xpath", Const.NVL( field.getXPath(), "" ) );
    fieldNode.setProperty( "element", Const.NVL( field.getElementTypeCode(), "" ) );
    fieldNode.setProperty( "resultType", Const.NVL( field.getResultTypeCode(), "" ) );
    fieldNode.setProperty( "repeat", field.isRepeated() );
    break;
  }
  return fieldNode;
}
/**
 * Creates a resource node for an external file resource. Only HadoopFileMeta
 * instances produce a node here; anything else returns null. When the resource
 * URL carries a host name, the node is tagged with that host (and cluster name,
 * when resolvable).
 */
@Override public IMetaverseNode createResourceNode( final M meta, final IExternalResourceInfo resource )
  throws MetaverseException {
  IMetaverseNode resourceNode = null;
  if ( meta instanceof HadoopFileMeta ) {
    resourceNode = createResourceNode( resource );
    final HadoopFileMeta hMeta = (HadoopFileMeta) meta;
    // Host name is parsed out of the resource's URL.
    final String hostName = hMeta.getUrlHostName( resource.getName() );
    if ( StringUtils.isNotBlank( hostName ) ) {
      resourceNode.setProperty( DictionaryConst.PROPERTY_HOST_NAME, hostName );
      // update the default "File" type to "HDFS File"
      // NOTE(review): the comment above says "HDFS File" but the code sets
      // NODE_TYPE_FILE — confirm whether this should be an HDFS-specific
      // node type constant instead.
      resourceNode.setProperty( DictionaryConst.PROPERTY_TYPE, DictionaryConst.NODE_TYPE_FILE );
      final String clusterName = hMeta.getClusterName( resource.getName() );
      if ( StringUtils.isNotBlank( clusterName ) ) {
        resourceNode.setProperty( DictionaryConst.PROPERTY_CLUSTER, clusterName );
      }
    }
  }
  return resourceNode;
}
}
// NOTE(review): this span is a garbled fragment — several disjoint snippets of
// (apparently) the Meta-Inject link-building logic concatenated together, with
// unbalanced braces and references to locals (transformationPath, index,
// streamSourceStepOutputField, targetTemplateField, stepVertex,
// subTransPropertyNode, injectorStepName, injectotFieldName, sourceStepField,
// derivedField, ...) whose declarations are not visible here. Preserved
// byte-for-byte; recover the full method from source control before editing.
final TransMeta subTransMeta = KettleAnalyzerUtil.getSubTransMeta( meta );
subTransMeta.setFilename( transformationPath );
// Link the stream-source step's output field to the matching field of the
// stream-target step inside the sub-transformation (presumably — TODO confirm).
final Vertex streamTargetStepOutputField = findFieldVertex( subTransMeta, meta.getStreamTargetStepname(), targetFieldNames.get( index++ ) );
getMetaverseBuilder().addLink( streamSourceStepOutputField, DictionaryConst.LINK_DERIVES, streamTargetStepOutputField );
} else {
getMetaverseBuilder().addLink( targetTemplateField, DictionaryConst.LINK_INPUTS, stepVertex );
// The next line is mid-expression — its opening call is missing from this view.
DictionaryConst.PROPERTY_LOGICAL_ID ), targetTemplateStepName + ":" + targetTemplateStepAttr.getAttributeKey(), null );
getMetaverseBuilder().addNode( subTransPropertyNode );
final Vertex subTransPropertyVertex = findVertexById( subTransPropertyNode.getStringID() );
if ( subTransPropertyVertex != null ) {
getMetaverseBuilder().addLink( targetTemplateStepVertex, DictionaryConst.LINK_CONTAINS, subTransPropertyVertex );
// NOTE(review): "injectotFieldName" looks like a typo'd identifier — it is a
// name declared elsewhere, so it cannot be corrected from this fragment alone.
final IMetaverseNode matchingInjectorFieldNode = getInputs().findNode( injectorStepName, injectotFieldName );
if ( matchingInjectorFieldNode != null ) {
getMetaverseBuilder().addLink( matchingInjectorFieldNode, DictionaryConst.LINK_POPULATES, subTransPropertyNode );
getMetaverseBuilder().addLink( sourceStepField, DictionaryConst.LINK_INPUTS, stepVertex );
// Mid-expression again — the enclosing call's start is missing.
sourceStepField.getProperty( DictionaryConst.PROPERTY_NAME ).toString() );
if ( derivedField != null ) {
getMetaverseBuilder().addLink( sourceStepField, DictionaryConst.LINK_DERIVES, derivedField );
@Override public Set<ComponentDerivationRecord> getChangeRecords( GetXMLDataMeta meta ) throws MetaverseAnalyzerException { Set<ComponentDerivationRecord> changes = new HashSet<>(); boolean isInFields = meta.isInFields(); boolean isAFile = meta.getIsAFile(); boolean isAUrl = meta.isReadUrl(); // if we are getting xml from a field, we need to add the "derives" links from the xml to the output fields if ( isInFields && !isAFile && !isAUrl ) { GetXMLDataField[] fields = baseStepMeta.getInputFields(); if ( getInputs() != null ) { Set<StepField> inputFields = getInputs().getFieldNames(); for ( StepField inputField : inputFields ) { if ( inputField.getFieldName().equals( meta.getXMLField() ) ) { // link this to all of the outputs that come from the xml for ( GetXMLDataField field : fields ) { ComponentDerivationRecord change = new ComponentDerivationRecord( meta.getXMLField(), field.getName() ); changes.add( change ); } break; } } } } return changes; }
/**
 * Computes derivation records for the Kafka Producer step: the configured
 * key and message fields from each incoming step derive the topic's KEY and
 * MESSAGE resource fields.
 *
 * Fix: guard against an unset key/message field. getUsedFields() already
 * null-checks both getters; previously this method did not, so a null field
 * produced a ComponentDerivationRecord wrapping a StepField with a null
 * field name.
 */
@Override
public Set<ComponentDerivationRecord> getChangeRecords( KafkaProducerOutputMeta meta ) {
  LinkedHashSet<ComponentDerivationRecord> changes = new LinkedHashSet<>();
  Set<String> stepNames = getInputs().getStepNames();
  for ( String stepName : stepNames ) {
    if ( meta.getKeyField() != null ) {
      changes.add( new ComponentDerivationRecord(
        new StepField( stepName, meta.getKeyField() ), new StepField( RESOURCE, KEY ) ) );
    }
    if ( meta.getMessageField() != null ) {
      changes.add( new ComponentDerivationRecord(
        new StepField( stepName, meta.getMessageField() ), new StepField( RESOURCE, MESSAGE ) ) );
    }
  }
  return changes;
}
/**
 * Lazily builds and caches the metaverse node representing the Kafka
 * bootstrap-server connection for this step.
 */
@SuppressWarnings( "Duplicates" )
@Override
public IMetaverseNode getConnectionNode() {
  if ( connectionNode != null ) {
    return connectionNode;
  }
  // Identify the Kafka cluster by its bootstrap server list.
  connectionNode = createNodeFromDescriptor( new MetaverseComponentDescriptor(
    baseStepMeta.getBootstrapServers(), NODE_TYPE_KAFKA_SERVER,
    getDescriptor().getNamespace(), getDescriptor().getContext() ) );
  return connectionNode;
}
/**
 * Returns the incoming fields consumed by the Kafka Producer step: the
 * configured key and message fields, attributed to every incoming step.
 * Unset (null) fields are skipped.
 */
@Override
protected Set<StepField> getUsedFields( KafkaProducerOutputMeta meta ) {
  final LinkedHashSet<StepField> usedFields = new LinkedHashSet<>();
  for ( final String stepName : getInputs().getStepNames() ) {
    if ( meta.getKeyField() != null ) {
      usedFields.add( new StepField( stepName, meta.getKeyField() ) );
    }
    if ( meta.getMessageField() != null ) {
      usedFields.add( new StepField( stepName, meta.getMessageField() ) );
    }
  }
  return usedFields;
}
/**
 * Creates the "table" node for a Kafka resource — i.e. the topic — scoped
 * under the connection (broker) node.
 */
@Override
protected IMetaverseNode createTableNode( IExternalResourceInfo resource ) {
  final KafkaResourceInfo kafkaResource = (KafkaResourceInfo) resource;
  final MetaverseComponentDescriptor topicDescriptor = new MetaverseComponentDescriptor(
    kafkaResource.getTopic(), NODE_TYPE_KAFKA_TOPIC, getConnectionNode(), getDescriptor().getContext() );
  return createNodeFromDescriptor( topicDescriptor );
}
/**
 * Adds XML-Output-specific metadata to the step node: the parent (root)
 * element and the repeating row element used when writing the document.
 */
@Override
protected void customAnalyze( XMLOutputMeta meta, IMetaverseNode node ) throws MetaverseAnalyzerException {
  super.customAnalyze( meta, node );
  node.setProperty( "parentnode", meta.getMainElement() );
  node.setProperty( "rownode", meta.getRepeatElement() );
}
@Override protected void customAnalyze( final JmsConsumerMeta meta, final IMetaverseNode rootNode ) throws MetaverseAnalyzerException { // TODO: When/If adding JmsProducerAnalyzer, move common code to new base class. final JmsDelegate jmsDelegate = meta.getJmsDelegate(); rootNode.setProperty( "batchSize", parentTransMeta.environmentSubstitute( meta.getBatchSize() ) ); rootNode.setProperty( "batchDuration", parentTransMeta.environmentSubstitute( meta.getBatchDuration() ) ); rootNode.setProperty( "connectionType", jmsDelegate.getConnectionType() ); rootNode.setProperty( "connectionUrl", parentTransMeta.environmentSubstitute( jmsDelegate.getConnectionUrl() ) ); rootNode.setProperty( "destinationType", parentTransMeta.environmentSubstitute( jmsDelegate.getDestinationType() ) ); rootNode.setProperty( "destinationName", parentTransMeta.environmentSubstitute( jmsDelegate.getDestinationName() ) ); rootNode.setProperty( "receiveTimeout", parentTransMeta.environmentSubstitute( meta.getReceiveTimeout() ) ); KettleAnalyzerUtil.analyze( this, parentTransMeta, meta, rootNode ); }
// NOTE(review): this span is a truncated fragment — the method signature above
// it and the closing braces below it are missing from this view, and the final
// `if` is cut off mid-block. It appears to record Text-File-Input content
// settings on the step node (TODO confirm against the full source). Preserved
// byte-for-byte.
super.customAnalyze( meta, rootNode );
if ( meta.isAcceptingFilenames() ) {
// File names come from an upstream field rather than a static file list.
rootNode.setProperty( "fileNameStep", meta.getAcceptingStepName() );
rootNode.setProperty( "fileNameField", meta.getAcceptingField() );
rootNode.setProperty( "passingThruFields", meta.inputFiles.passingThruFields );
rootNode.setProperty( "fileType", meta.content.fileType );
rootNode.setProperty( "separator", meta.content.separator );
rootNode.setProperty( "enclosure", meta.content.enclosure );
rootNode.setProperty( "breakInEnclosureAllowed", meta.content.breakInEnclosureAllowed );
rootNode.setProperty( "escapeCharacter", meta.content.escapeCharacter );
if ( meta.content.header ) {
rootNode.setProperty( "nrHeaderLines", meta.content.nrHeaderLines );
rootNode.setProperty( "nrFooterLines", meta.content.nrFooterLines );
rootNode.setProperty( "nrWraps", meta.content.nrWraps );
rootNode.setProperty( "nrLinesPerPage", meta.content.nrLinesPerPage );
rootNode.setProperty( "nrLinesDocHeader", meta.content.nrLinesDocHeader );
rootNode.setProperty( "fileCompression", meta.content.fileCompression );
rootNode.setProperty( "noEmptyLines", meta.content.noEmptyLines );
rootNode.setProperty( "includeFilename", meta.content.includeFilename );
if ( meta.content.includeFilename ) {
rootNode.setProperty( "filenameField", meta.content.filenameField );
rootNode.setProperty( "includeRowNumber", meta.content.includeRowNumber );
// Fragment ends here mid-block; note the condition below repeats
// meta.content.includeFilename — possibly meant to be includeRowNumber
// (TODO confirm in full source).
if ( meta.content.includeFilename ) {
/**
 * Returns (building and caching on first use) the metaverse node for the
 * Kafka bootstrap-server connection used by this step.
 */
@Override
public IMetaverseNode getConnectionNode() {
  if ( connectionNode == null ) {
    // The descriptor is keyed on the bootstrap server list, which identifies
    // the Kafka cluster within the current namespace/context.
    final MetaverseComponentDescriptor serverDescriptor = new MetaverseComponentDescriptor(
      baseStepMeta.getBootstrapServers(), NODE_TYPE_KAFKA_SERVER,
      getDescriptor().getNamespace(), getDescriptor().getContext() );
    connectionNode = createNodeFromDescriptor( serverDescriptor );
  }
  return connectionNode;
}
/**
 * Adds JSON-Input-specific metadata to the root step node after the shared
 * base analysis. When the source comes from an incoming field, records how
 * that field is interpreted (raw content, file path, or URL); otherwise
 * records the configured filename field.
 */
@Override
protected void customAnalyze( final JsonInputMeta meta, final IMetaverseNode rootNode )
  throws MetaverseAnalyzerException {
  super.customAnalyze( meta, rootNode );
  if ( meta.isAcceptingFilenames() ) {
    rootNode.setProperty( "sourceField", meta.getAcceptingField() );
    rootNode.setProperty( "sourceFieldIsFile", meta.getIsAFile() );
    rootNode.setProperty( "sourceFieldIsUrl", meta.isReadUrl() );
    rootNode.setProperty( "removeSourceField", meta.isRemoveSourceField() );
  } else {
    // NOTE(review): property name "fileDirName" stores getFilenameField() —
    // confirm the key matches the getter's meaning.
    rootNode.setProperty( "fileDirName", meta.getFilenameField() );
  }
}
// NOTE(review): this span is a truncated fragment — the method name/parameters
// before `throws` and the closing braces for the method and the two inner `if`
// blocks are missing from this view. It appears to record Text-File-Output
// settings on the step node (TODO confirm against the full source). Preserved
// byte-for-byte.
throws MetaverseAnalyzerException {
super.customAnalyze( meta, rootNode );
rootNode.setProperty( "createParentFolder", meta.isCreateParentFolder() );
rootNode.setProperty( "doNotOpenNewFileInit", meta.isDoNotOpenNewFileInit() );
if ( meta.isFileNameInField() ) {
// File name is taken from an incoming field.
rootNode.setProperty( "fileNameField", meta.getFileNameField() );
rootNode.setProperty( "extension", meta.getExtension() );
rootNode.setProperty( "stepNrInFilename", meta.isStepNrInFilename() );
rootNode.setProperty( "partNrInFilename", meta.isPartNrInFilename() );
rootNode.setProperty( "dateInFilename", meta.isDateInFilename() );
rootNode.setProperty( "timeInFilename", meta.isTimeInFilename() );
if ( meta.isSpecifyingFormat() ) {
rootNode.setProperty( "dateTimeFormat", meta.getDateTimeFormat() );
rootNode.setProperty( "addFilenamesToResult", meta.isAddToResultFiles() );
rootNode.setProperty( "append", meta.isFileAppended() );
// Content/formatting options.
rootNode.setProperty( "separator", meta.getSeparator() );
rootNode.setProperty( "enclosure", meta.getEnclosure() );
rootNode.setProperty( "forceEnclosure", meta.isEnclosureForced() );
rootNode.setProperty( "addHeader", meta.isHeaderEnabled() );
rootNode.setProperty( "addFooter", meta.isFooterEnabled() );
rootNode.setProperty( "fileFormat", meta.getFileFormat() );
rootNode.setProperty( "fileCompression", meta.getFileCompression() );
rootNode.setProperty( "encoding", meta.getEncoding() );
rootNode.setProperty( "rightPadFields", meta.isPadded() );
rootNode.setProperty( "fastDataDump", meta.isFastDump() );
rootNode.setProperty( "splitEveryRows", meta.getSplitEveryRows() );
rootNode.setProperty( "endingLine", meta.getEndedLine() );
/**
 * Adds JSON-Output-specific metadata to the root step node after the shared
 * base analysis: append mode, servlet pass-through, result-file flag, JSON
 * block name, operation type, and (when set) the output value field.
 */
@Override
protected void customAnalyze( final JsonOutputMeta meta, final IMetaverseNode rootNode )
  throws MetaverseAnalyzerException {
  super.customAnalyze( meta, rootNode );
  rootNode.setProperty( "isFileAppended", meta.isFileAppended() );
  rootNode.setProperty( "passDataToServletOutput", meta.passDataToServletOutput() );
  // Non-standard getter casing ("AddToResult") comes from the Kettle meta API.
  rootNode.setProperty( "addToResult", meta.AddToResult() );
  rootNode.setProperty( "jsonBloc", meta.getJsonBloc() );
  rootNode.setProperty( "operationType", meta.getOperationType() );
  final String outputValue = meta.getOutputValue();
  if ( !StringUtils.isBlank( outputValue ) ) {
    rootNode.setProperty( "outputValue", outputValue );
  }
}
@Override protected void customAnalyze( JobEntrySparkSubmit entry, IMetaverseNode rootNode ) throws MetaverseAnalyzerException { // -- Common properties rootNode.setProperty( ARGUMENTS, entry.environmentSubstitute( entry.getArgs() ) ); rootNode.setProperty( EXEC_MEMORY, entry.environmentSubstitute( entry.getExecutorMemory() ) ); rootNode.setProperty( DRIVER_MEMORY, entry.environmentSubstitute( entry.getDriverMemory() ) ); rootNode.setProperty( MASTER_URL, entry.environmentSubstitute( entry.getMaster() ) ); if ( JobEntrySparkSubmit.JOB_TYPE_JAVA_SCALA.equals( entry.getJobType() ) ) { // --- Java / Scala properties rootNode.setProperty( CLASS_NAME, entry.environmentSubstitute( entry.getClassName() ) ); if ( StringUtils.isNotBlank( entry.getJar() ) ) { rootNode.setProperty( MetaverseAnalyzers.JobEntrySparkSubmitAnalyzer.APPLICATION_JAR, normalizePath( entry.environmentSubstitute( entry.getJar() ) ) ); } } else if ( JobEntrySparkSubmit.JOB_TYPE_PYTHON.equals( entry.getJobType() ) ) { // Python properties if ( StringUtils.isNotBlank( entry.getPyFile() ) ) { rootNode.setProperty( MetaverseAnalyzers.JobEntrySparkSubmitAnalyzer.APPLICATION_JAR, normalizePath( entry.environmentSubstitute( entry.getPyFile() ) ) ); } } }