@Override
public RecordSchema getSchema(Map<String, String> variables, RecordSchema readSchema) throws SchemaNotFoundException, IOException {
    // Resolve the delegate writer factory lazily; without one there is no schema to offer.
    final RecordSetWriterFactory factory = recordFactory.get();
    if (factory == null) {
        return null;
    }

    try {
        return factory.getSchema(variables, readSchema);
    } catch (UndeclaredThrowableException ute) {
        // A proxy can surface checked exceptions wrapped in UndeclaredThrowableException;
        // unwrap the underlying cause into the declared IOException.
        throw new IOException(ute.getCause());
    }
}
}
@Override
public void process(final OutputStream out) throws IOException {
    // Wrap the JDBC ResultSet as a RecordSet and resolve the schema to write with,
    // then stream all rows through the configured writer, capturing the write
    // result and MIME type for the caller via the shared references.
    final ResultSetRecordSet rows;
    final RecordSchema targetSchema;
    try {
        rows = new ResultSetRecordSet(rs, readerSchema);
        final RecordSchema sourceSchema = rows.getSchema();
        targetSchema = recordSetWriterFactory.getSchema(originalAttributes, sourceSchema);
    } catch (final SQLException | SchemaNotFoundException e) {
        throw new ProcessException(e);
    }

    try (final RecordSetWriter writer = recordSetWriterFactory.createWriter(getLogger(), targetSchema, out)) {
        writeResultRef.set(writer.write(rows));
        mimeTypeRef.set(writer.getMimeType());
    } catch (final Exception e) {
        throw new IOException(e);
    }
}
});
protected byte[] getData(final ReportingContext context, InputStream in, Map<String, String> attributes) {
    // Convert the incoming JSON metrics stream into the configured record format,
    // recording the resulting MIME type and writer attributes into the supplied map.
    try (final JsonRecordReader reader = new JsonRecordReader(in, recordSchema)) {
        final RecordSetWriterFactory writerFactory = context.getProperty(RECORD_WRITER).asControllerService(RecordSetWriterFactory.class);
        final RecordSchema writeSchema = writerFactory.getSchema(null, recordSchema);
        final ByteArrayOutputStream buffer = new ByteArrayOutputStream();

        try (final RecordSetWriter writer = writerFactory.createWriter(getLogger(), writeSchema, buffer)) {
            writer.beginRecordSet();
            for (Record record = reader.nextRecord(); record != null; record = reader.nextRecord()) {
                writer.write(record);
            }
            final WriteResult writeResult = writer.finishRecordSet();
            attributes.put(CoreAttributes.MIME_TYPE.key(), writer.getMimeType());
            attributes.putAll(writeResult.getAttributes());
        }

        return buffer.toByteArray();
    } catch (IOException | SchemaNotFoundException | MalformedRecordException e) {
        throw new ProcessException("Failed to write metrics using record writer: " + e.getMessage(), e);
    }
}
// Streams a JDBC ResultSet through the configured RecordSetWriter and returns the
// number of records written. The fields fullRecordSet, writeSchema, mimeType and
// writeResultRef persist across invocations: the record set and write schema are
// built lazily on the first call, and the MIME type is captured once.
@Override
public long writeResultSet(ResultSet resultSet, OutputStream outputStream, ComponentLog logger, AbstractQueryDatabaseTable.MaxValueResultSetRowCollector callback) throws Exception {
    final RecordSet recordSet;
    try {
        if (fullRecordSet == null) {
            // First call: derive an Avro schema from the ResultSet metadata, convert it
            // to a RecordSchema, and negotiate the write schema with the writer factory.
            final Schema avroSchema = JdbcCommon.createSchema(resultSet, options);
            final RecordSchema recordAvroSchema = AvroTypeUtil.createSchema(avroSchema);
            fullRecordSet = new ResultSetRecordSetWithCallback(resultSet, recordAvroSchema, callback);
            writeSchema = recordSetWriterFactory.getSchema(originalAttributes, fullRecordSet.getSchema());
        }
        // Optionally cap the number of rows emitted per FlowFile.
        recordSet = (maxRowsPerFlowFile > 0) ? fullRecordSet.limit(maxRowsPerFlowFile) : fullRecordSet;
    } catch (final SQLException | SchemaNotFoundException | IOException e) {
        throw new ProcessException(e);
    }

    try (final RecordSetWriter resultSetWriter = recordSetWriterFactory.createWriter(logger, writeSchema, outputStream)) {
        writeResultRef.set(resultSetWriter.write(recordSet));
        if (mimeType == null) {
            mimeType = resultSetWriter.getMimeType();
        }
        return writeResultRef.get().getRecordCount();
    } catch (final Exception e) {
        throw new IOException(e);
    }
}
@Override
public void process(final InputStream in, final OutputStream out) throws IOException {
    // Read every incoming record, run it through the processor's transform, and
    // write the result using the negotiated write schema; record count and MIME
    // type are surfaced through the captured attributes map and counter.
    try (final RecordReader reader = readerFactory.createRecordReader(originalAttributes, in, getLogger())) {
        final RecordSchema writeSchema = writerFactory.getSchema(originalAttributes, reader.getSchema());

        try (final RecordSetWriter writer = writerFactory.createWriter(getLogger(), writeSchema, out)) {
            writer.beginRecordSet();

            for (Record record = reader.nextRecord(); record != null; record = reader.nextRecord()) {
                final Record transformed = AbstractRecordProcessor.this.process(record, writeSchema, original, context);
                writer.write(transformed);
            }

            final WriteResult writeResult = writer.finishRecordSet();
            attributes.put("record.count", String.valueOf(writeResult.getRecordCount()));
            attributes.put(CoreAttributes.MIME_TYPE.key(), writer.getMimeType());
            attributes.putAll(writeResult.getAttributes());
            recordCount.set(writeResult.getRecordCount());
        }
    } catch (final SchemaNotFoundException e) {
        throw new ProcessException(e.getLocalizedMessage(), e);
    } catch (final MalformedRecordException e) {
        throw new ProcessException("Could not parse incoming data", e);
    }
}
});
// Routes each incoming record to one or more relationships, lazily creating one
// FlowFile + RecordSetWriter pair per relationship in the `writers` map.
// NOTE(review): writers opened here are NOT closed on this path — presumably the
// surrounding code finishes and closes every entry in `writers`; confirm that all
// paths (including failure) do so, or the per-relationship streams leak.
@Override
public void process(final InputStream in) throws IOException {
    try (final RecordReader reader = readerFactory.createRecordReader(originalAttributes, in, getLogger())) {
        final RecordSchema writeSchema = writerFactory.getSchema(originalAttributes, reader.getSchema());
        Record record;
        while ((record = reader.nextRecord()) != null) {
            // A record may route to several relationships; it is written once per relationship.
            final Set<Relationship> relationships = route(record, writeSchema, original, context, flowFileContext);
            numRecords.incrementAndGet();
            for (final Relationship relationship : relationships) {
                final RecordSetWriter recordSetWriter;
                Tuple<FlowFile, RecordSetWriter> tuple = writers.get(relationship);
                if (tuple == null) {
                    // First record for this relationship: open a new FlowFile and writer.
                    FlowFile outFlowFile = session.create(original);
                    final OutputStream out = session.write(outFlowFile);
                    recordSetWriter = writerFactory.createWriter(getLogger(), writeSchema, out);
                    recordSetWriter.beginRecordSet();
                    tuple = new Tuple<>(outFlowFile, recordSetWriter);
                    writers.put(relationship, tuple);
                } else {
                    recordSetWriter = tuple.getValue();
                }
                recordSetWriter.write(record);
            }
        }
    } catch (final SchemaNotFoundException | MalformedRecordException e) {
        throw new ProcessException("Could not parse incoming data", e);
    }
}
});
// Open a reader over the input and negotiate the write schema from the reader's
// schema. (Fragment: the try-with-resources body continues beyond this excerpt.)
try (final RecordReader reader = readerFactory.createRecordReader(originalAttributes, in, getLogger())) { final RecordSchema schema = writerFactory.getSchema(originalAttributes, reader.getSchema());
// NOTE(review): reader is created outside any try-with-resources here — confirm
// that later (unseen) code closes it on all paths, otherwise it leaks.
final RecordReader reader = readerFactory.createRecordReader(originalAttributes, in, getLogger()); final RecordSchema writeSchema = writerFactory.getSchema(originalAttributes, reader.getSchema());
// Negotiate the write schema with no FlowFile attributes available; on any
// failure the error is logged and (per the message) the Kafka offsets roll back.
writeSchema = writerFactory.getSchema(Collections.emptyMap(), recordSchema); } catch (final Exception e) { logger.error("Failed to obtain Schema for FlowFile. Will roll back the Kafka message offsets.", e);
// Derive the write schema from the FlowFile attributes; a null record is allowed
// and yields a null read schema for the factory to resolve on its own.
final RecordSchema schema = recordSetWriterFactory.getSchema(originalFlowFile.getAttributes(), record == null ? null : record.getSchema());
// Build a RecordSet from the Solr query results using the writer's schema
// (no attributes or read schema are supplied to getSchema here).
// NOTE(review): StringBuffer is used only as a mutable single-threaded holder here
// as far as this excerpt shows — StringBuilder would avoid needless synchronization;
// confirm no concurrent use before changing.
final RecordSetWriterFactory writerFactory = context.getProperty(RECORD_WRITER).evaluateAttributeExpressions() .asControllerService(RecordSetWriterFactory.class); final RecordSchema schema = writerFactory.getSchema(null, null); final RecordSet recordSet = SolrUtils.solrDocumentsToRecordSet(response.getResults(), schema); final StringBuffer mimeType = new StringBuffer();
WriteResult writeResult = null;
// Use the type-safe Collections.emptyMap() factory instead of the raw-typed
// Collections.EMPTY_MAP constant, which forces an unchecked conversion to
// Map<String, String> at the getSchema call site.
final RecordSchema recordSchema = recordSetWriterFactory.getSchema(Collections.emptyMap(), record.getSchema());
// (Fragment: the try-with-resources body continues beyond this excerpt.)
try (final OutputStream out = session.write(flowFile);
     final RecordSetWriter recordWriter = recordSetWriterFactory.createWriter(getLogger(), recordSchema, out)) {
// Negotiate the write schema with no FlowFile attributes; only the read schema guides resolution.
final RecordSchema writeSchema = writerFactory.getSchema(Collections.emptyMap(), recordSchema);
// Resolve the write schema from the retrieval variables, open the writer over the
// output stream, and start the record set. (Fragment: `writer` is a wider-scoped
// variable — presumably closed by surrounding code; confirm.)
final RecordSchema writeSchema = writerFactory.getSchema(schemaRetrievalVariables, readerSchema); writer = writerFactory.createWriter(logger, writeSchema, out); writer.beginRecordSet();
// Open a reader over the input, negotiate the write schema, and open the output
// FlowFile's stream. NOTE(review): `out` is opened here but not in the resource
// list — confirm downstream (unseen) code closes it on all paths.
try (final RecordReader reader = readerFactory.createRecordReader(originalAttributes, in, getLogger())) { final RecordSchema writeSchema = writerFactory.getSchema(originalAttributes, reader.getSchema()); final OutputStream out = session.write(outFlowFile);
// Read the original FlowFile's content and derive the write schema from its
// attributes and the reader's schema. (Fragment: the loop body that consumes
// `record` continues beyond this excerpt; `schema` is a wider-scoped variable.)
try (final InputStream in = session.read(original); final RecordReader reader = readerFactory.createRecordReader(original, in, getLogger())) { schema = writerFactory.getSchema(original.getAttributes(), reader.getSchema()); Record record;
// Negotiate the write schema from the FlowFile's attributes; on any failure the
// error is logged and (per the message) the Kafka offsets roll back.
writeSchema = writerFactory.getSchema(flowFile.getAttributes(), recordSchema); } catch (final Exception e) { logger.error("Failed to obtain Schema for FlowFile. Will roll back the Kafka message offsets.", e);
// Negotiate the write schema from the FlowFile's attributes; on any failure the
// error is logged and (per the message) the Kafka offsets roll back.
writeSchema = writerFactory.getSchema(flowFile.getAttributes(), recordSchema); } catch (final Exception e) { logger.error("Failed to obtain Schema for FlowFile. Will roll back the Kafka message offsets.", e);
// Negotiate the write schema from the FlowFile's attributes; on any failure the
// error is logged and (per the message) the Kafka offsets roll back.
writeSchema = writerFactory.getSchema(flowFile.getAttributes(), recordSchema); } catch (final Exception e) { logger.error("Failed to obtain Schema for FlowFile. Will roll back the Kafka message offsets.", e);
// Resolve the write schema by schema name (via the attribute map built just
// above) and open the writer. NOTE(review): the `}};` ending suggests the map is
// built with double-brace initialization — an anonymous inner class that pins the
// enclosing instance; prefer an explicit Map if this fragment is revisited.
// NOTE(review): `writer` is not in a resource list here — confirm it is closed downstream.
put("schema.name", schemaName); }}; RecordSchema schema = writerFactory.getSchema(attrs, null); RecordSetWriter writer = writerFactory.createWriter(getLogger(), schema, out); long count = 0L;