// Fragment: method header tail plus loop opening (the signature's start and the if-body lie outside this view).
// Reads the schema once, then iterates every record from the parser, extracting the SQL statement from the
// configured field; this branch handles records whose SQL field is missing or empty.
// NOTE(review): the (String) cast assumes the SQL field holds a string value — confirm upstream validation.
throws IllegalArgumentException, MalformedRecordException, IOException, SQLException { final RecordSchema recordSchema = recordParser.getSchema(); while ((currentRecord = recordParser.nextRecord()) != null) { Object sql = currentRecord.getValue(sqlField); if (sql == null || StringUtils.isEmpty((String) sql)) {
// Fragment: opens a RecordReader over the incoming stream (try-with-resources), derives the writer's schema
// from the reader's, and wraps the record stream in a PushBackRecordSet so a record can be peeked and pushed
// back. The closing braces of this try block are outside this view.
try (final RecordReader reader = readerFactory.createRecordReader(originalAttributes, in, getLogger())) { final RecordSchema schema = writerFactory.getSchema(originalAttributes, reader.getSchema()); final RecordSet recordSet = reader.createRecordSet(); final PushBackRecordSet pushbackSet = new PushBackRecordSet(recordSet);
@Override
public void close() {
    // Best-effort cleanup: close the decorated record source first, then the raw
    // stream. Failures are logged at WARN and swallowed so that one failing close
    // never prevents the other resource from being released.
    if (recordParser != null) {
        try {
            recordParser.close();
        } catch (final Exception parserCloseFailure) {
            logger.warn("Failed to close decorated source for " + flowFile, parserCloseFailure);
        }
    }

    try {
        rawIn.close();
    } catch (final Exception streamCloseFailure) {
        logger.warn("Failed to close InputStream for " + flowFile, streamCloseFailure);
    }
}
}
/**
 * Reads and returns the next record in the stream, coercing field values to the
 * schema's types and dropping any fields not declared in the schema.
 *
 * @return the next available record, or <code>null</code> once no more records remain
 *
 * @throws IOException if the underlying data cannot be read
 * @throws MalformedRecordException if an unrecoverable failure occurs while parsing a record
 * @throws SchemaValidationException if a record contains a field that violates the schema
 *         and cannot be coerced to the declared field type
 */
default Record nextRecord() throws IOException, MalformedRecordException {
    // Convenience overload: coerceTypes = true, dropUnknownFields = true.
    return nextRecord(true, true);
}
@Override
public RecordSchema getSchema() throws IOException {
    // Adapter: the outer RecordReader may throw MalformedRecordException, but this
    // interface only declares IOException — tunnel the failure with cause preserved.
    try {
        return RecordReader.this.getSchema();
    } catch (final MalformedRecordException parseFailure) {
        throw new IOException(parseFailure);
    }
}
// Fragment: merge loop — on the first record (writer not yet created), opens the merged FlowFile's
// OutputStream and migrates the source FlowFile into this session. Closing braces are outside this view.
// NOTE(review): recordReader.close() appears twice in this fragment — the second call looks redundant
// (or belongs on a different path); confirm against the full method.
// NOTE(review): closing the reader while the surrounding while-loop still calls nextRecord() looks
// suspect — verify the intended control flow in the surrounding code.
while ((record = recordReader.nextRecord()) != null) { if (recordWriter == null) { final OutputStream rawOut = session.write(merged); recordReader.close(); flowFileSession.migrate(this.session, Collections.singleton(flowFile)); flowFileMigrated = true; recordReader.close();
@Override
public Record next() throws IOException {
    // Bridge to the enclosing reader: MalformedRecordException is re-wrapped as an
    // IOException (cause preserved) to satisfy this narrower throws clause.
    try {
        return RecordReader.this.nextRecord();
    } catch (final MalformedRecordException parseFailure) {
        throw new IOException(parseFailure);
    }
}
};
// Fragment: walks the reader's schema, visiting each field's name (the loop body continues
// beyond this view).
RecordSchema recordSchema = recordReader.getSchema(); for (RecordField field : recordSchema.getFields()) { String fieldName = field.getFieldName();
// Fragment: merge loop — on the first record (writer not yet created), opens the merged FlowFile's
// OutputStream and migrates the source FlowFile into this session. Closing braces are outside this view.
// NOTE(review): recordReader.close() appears twice in this fragment — the second call looks redundant
// (or belongs on a different path); confirm against the full method.
// NOTE(review): closing the reader while the surrounding while-loop still calls nextRecord() looks
// suspect — verify the intended control flow in the surrounding code.
while ((record = recordReader.nextRecord()) != null) { if (recordWriter == null) { final OutputStream rawOut = session.write(merged); recordReader.close(); flowFileSession.migrate(this.session, Collections.singleton(flowFile)); flowFileMigrated = true; recordReader.close();
// Anonymous callback: reads every record from the incoming stream, routes each record to one or more
// relationships via route(...), and lazily creates one (FlowFile, RecordSetWriter) pair per relationship
// on first use, caching them in the writers map. Each routed record is written to every matched
// relationship's writer, and numRecords tracks the total processed.
// NOTE(review): the per-relationship OutputStream/RecordSetWriter opened here are not closed inside this
// callback — presumably finalized by the caller after processing; confirm in the surrounding code.
// Schema and parse failures are surfaced as ProcessException with the cause preserved.
@Override public void process(final InputStream in) throws IOException { try (final RecordReader reader = readerFactory.createRecordReader(originalAttributes, in, getLogger())) { final RecordSchema writeSchema = writerFactory.getSchema(originalAttributes, reader.getSchema()); Record record; while ((record = reader.nextRecord()) != null) { final Set<Relationship> relationships = route(record, writeSchema, original, context, flowFileContext); numRecords.incrementAndGet(); for (final Relationship relationship : relationships) { final RecordSetWriter recordSetWriter; Tuple<FlowFile, RecordSetWriter> tuple = writers.get(relationship); if (tuple == null) { FlowFile outFlowFile = session.create(original); final OutputStream out = session.write(outFlowFile); recordSetWriter = writerFactory.createWriter(getLogger(), writeSchema, out); recordSetWriter.beginRecordSet(); tuple = new Tuple<>(outFlowFile, recordSetWriter); writers.put(relationship, tuple); } else { recordSetWriter = tuple.getValue(); } recordSetWriter.write(record); } } } catch (final SchemaNotFoundException | MalformedRecordException e) { throw new ProcessException("Could not parse incoming data", e); } } });
@Override public void write(long writeId, InputStream inputStream) throws StreamingException { // The inputStream is already available to the recordReader, so just iterate through the records try { Record record; while ((record = recordReader.nextRecord()) != null) { write(writeId, record); } } catch (MalformedRecordException | IOException e) { throw new StreamingException(e.getLocalizedMessage(), e); } }
// Calcite table adapter: lazily computes the relational row type for this FlowFile's records and caches
// it in relDataType (subsequent calls return the cached value). The record schema is read from the
// FlowFile content, each RecordField is mapped to a RelDataType with nullability applied, and the first
// schema seen is additionally cached in recordSchema.
// NOTE(review): only the InputStream is in the try-with-resources — the RecordReader created from it is
// never closed explicitly; confirm the reader holds no resources beyond the stream.
@Override public RelDataType getRowType(final RelDataTypeFactory typeFactory) { if (relDataType != null) { return relDataType; } RecordSchema schema; try (final InputStream in = session.read(flowFile)) { final RecordReader recordParser = recordParserFactory.createRecordReader(flowFile, in, logger); schema = recordParser.getSchema(); } catch (final Exception e) { throw new ProcessException("Failed to determine schema of data records for " + flowFile, e); } final List<String> names = new ArrayList<>(); final List<RelDataType> types = new ArrayList<>(); final JavaTypeFactory javaTypeFactory = (JavaTypeFactory) typeFactory; for (final RecordField field : schema.getFields()) { names.add(field.getFieldName()); final RelDataType relDataType = getRelDataType(field.getDataType(), javaTypeFactory); types.add(javaTypeFactory.createTypeWithNullability(relDataType, field.isNullable())); } logger.debug("Found Schema: {}", new Object[] {schema}); if (recordSchema == null) { recordSchema = schema; } relDataType = typeFactory.createStructType(Pair.zip(names, types)); return relDataType; }
// Fragment: opens the FlowFile content and a RecordReader over it (both auto-closed), capturing the
// schema's field names and the full RecordSet. The body and closing braces are outside this view.
try (final InputStream in = session.read(flowFile); final RecordReader recordReader = recordReaderFactory.createRecordReader(flowFile, in, getLogger())) { final List<String> fieldNames = recordReader.getSchema().getFieldNames(); final RecordSet recordSet = recordReader.createRecordSet();
// Closes the decorated record parser (if present) and then the raw input stream; each failure is
// logged at WARN and swallowed so that cleanup always runs to completion. The final brace closes
// the enclosing (anonymous) class.
@Override public void close() { if (recordParser != null) { try { recordParser.close(); } catch (final Exception e) { logger.warn("Failed to close decorated source for " + flowFile, e); } } try { rawIn.close(); } catch (final Exception e) { logger.warn("Failed to close InputStream for " + flowFile, e); } } }
// Anonymous callback: streams records from the reader to the writer, applying process(...) to each
// record, then records the final count, MIME type, and WriteResult attributes for the transformed
// FlowFile (recordCount is also published for the caller). Schema-lookup and parse failures are
// rethrown as ProcessException with the cause preserved.
@Override public void process(final InputStream in, final OutputStream out) throws IOException { try (final RecordReader reader = readerFactory.createRecordReader(originalAttributes, in, getLogger())) { final RecordSchema writeSchema = writerFactory.getSchema(originalAttributes, reader.getSchema()); try (final RecordSetWriter writer = writerFactory.createWriter(getLogger(), writeSchema, out)) { writer.beginRecordSet(); Record record; while ((record = reader.nextRecord()) != null) { final Record processed = AbstractRecordProcessor.this.process(record, writeSchema, original, context); writer.write(processed); } final WriteResult writeResult = writer.finishRecordSet(); attributes.put("record.count", String.valueOf(writeResult.getRecordCount())); attributes.put(CoreAttributes.MIME_TYPE.key(), writer.getMimeType()); attributes.putAll(writeResult.getAttributes()); recordCount.set(writeResult.getRecordCount()); } } catch (final SchemaNotFoundException e) { throw new ProcessException(e.getLocalizedMessage(), e); } catch (final MalformedRecordException e) { throw new ProcessException("Could not parse incoming data", e); } } });
// Fragment: for each record, evaluates every configured RecordPath against it (the handling of
// each RecordPathResult continues beyond this view).
while ((record = reader.nextRecord()) != null) { for (Map.Entry<String, RecordPath> entry : paths.entrySet()) { RecordPathResult result = entry.getValue().evaluate(record);
/**
 * Resolves the schema to validate records against, according to the configured
 * Schema Access Strategy: the reader's own schema, a named schema fetched from the
 * Schema Registry, or inline Avro schema text (expression language supported).
 *
 * @throws ProcessException if the configured strategy is not one of the known values
 */
protected RecordSchema getValidationSchema(final ProcessContext context, final FlowFile flowFile, final RecordReader reader)
        throws MalformedRecordException, IOException, SchemaNotFoundException {
    final String strategy = context.getProperty(SCHEMA_ACCESS_STRATEGY).getValue();

    if (strategy.equals(READER_SCHEMA.getValue())) {
        // Trust whatever schema the reader derived from the data itself.
        return reader.getSchema();
    }

    if (strategy.equals(SCHEMA_NAME_PROPERTY.getValue())) {
        // Look the schema up by name in the configured Schema Registry.
        final SchemaRegistry schemaRegistry = context.getProperty(SCHEMA_REGISTRY).asControllerService(SchemaRegistry.class);
        final String schemaName = context.getProperty(SCHEMA_NAME).evaluateAttributeExpressions(flowFile).getValue();
        return schemaRegistry.retrieveSchema(SchemaIdentifier.builder().name(schemaName).build());
    }

    if (strategy.equals(SCHEMA_TEXT_PROPERTY.getValue())) {
        // Parse the inline Avro schema text and convert it to a RecordSchema.
        final String schemaText = context.getProperty(SCHEMA_TEXT).evaluateAttributeExpressions(flowFile).getValue();
        final Schema avroSchema = new Schema.Parser().parse(schemaText);
        return AvroTypeUtil.createSchema(avroSchema);
    }

    throw new ProcessException("Invalid Schema Access Strategy: " + strategy);
}
}
// Fragment: writes the entire RecordSet through an HDFS record writer created from the reader's
// schema, capturing the WriteResult; the catch block opened here is completed outside this view.
final RecordSet recordSet = recordReader.createRecordSet(); recordWriter = createHDFSRecordWriter(context, flowFile, configuration, tempFile, recordReader.getSchema()); writeResult.set(recordWriter.write(recordSet)); } catch (Exception e) {
// Fragment: method header tail plus loop start — reads the schema once, then iterates records,
// pulling the raw value array from each; batchIndex suggests statements are executed in batches
// (the loop body and closing braces are outside this view).
throws IllegalArgumentException, MalformedRecordException, IOException, SQLException { final RecordSchema recordSchema = recordParser.getSchema(); final ComponentLog log = getLogger(); int batchIndex = 0; while ((currentRecord = recordParser.nextRecord()) != null) { Object[] values = currentRecord.getValues(); if (values != null) {
// Fragment: returns the next record from the resolved reader wrapped in Optional (empty when the
// stream is exhausted); any failure is rethrown as LookupFailureException, including the lookup
// coordinates in the message and preserving the cause.
return Optional.ofNullable(errOrReader.get().getValue().nextRecord()); } catch (Exception e) { throw new LookupFailureException(String.format("Failed to read Record when looking up with %s", coordinates), e);