@Override
public void process(final OutputStream out) throws IOException {
    final ResultSetRecordSet recordSet;
    final RecordSchema writeSchema;
    try {
        // Wrap the JDBC ResultSet as a RecordSet, then let the writer factory
        // map the result-set schema to the schema that will actually be written.
        recordSet = new ResultSetRecordSet(rs, readerSchema);
        writeSchema = recordSetWriterFactory.getSchema(originalAttributes, recordSet.getSchema());
    } catch (final SQLException | SchemaNotFoundException e) {
        throw new ProcessException(e);
    }

    // Write all records, capturing the write result and MIME type for the caller.
    try (final RecordSetWriter writer = recordSetWriterFactory.createWriter(getLogger(), writeSchema, out)) {
        writeResultRef.set(writer.write(recordSet));
        mimeTypeRef.set(writer.getMimeType());
    } catch (final Exception e) {
        throw new IOException(e);
    }
}
});
@Override
public RecordSetWriter createWriter(ComponentLog logger, RecordSchema schema, OutputStream out) throws SchemaNotFoundException, IOException {
    // No delegate configured yet — nothing to create a writer with.
    if (recordFactory.get() == null) {
        return null;
    }
    try {
        return recordFactory.get().createWriter(logger, schema, out);
    } catch (UndeclaredThrowableException ute) {
        // Unwrap proxy-thrown exceptions so the caller sees the real cause.
        throw new IOException(ute.getCause());
    }
}
@Override
public RecordSchema getSchema(Map<String, String> variables, RecordSchema readSchema) throws SchemaNotFoundException, IOException {
    // Delegate to the wrapped factory when one is available.
    final RecordSetWriterFactory delegate = recordFactory.get();
    if (delegate == null) {
        return null;
    }
    try {
        return delegate.getSchema(variables, readSchema);
    } catch (UndeclaredThrowableException ute) {
        // Unwrap the proxy wrapper so the caller sees the underlying failure.
        throw new IOException(ute.getCause());
    }
}
}
@Override
public void writeEmptyResultSet(OutputStream outputStream, ComponentLog logger) throws IOException {
    // Emit a record set containing zero records so downstream consumers still
    // receive valid (but empty) output in the configured format.
    try (final RecordSetWriter writer = recordSetWriterFactory.createWriter(logger, writeSchema, outputStream)) {
        mimeType = writer.getMimeType();
        writer.beginRecordSet();
        writer.finishRecordSet();
    } catch (final Exception e) {
        throw new IOException(e);
    }
}
// Resolve the configured Record Writer controller service; nulls are passed to
// getSchema() because there is no incoming schema or variable map here.
final RecordSetWriterFactory writerFactory = context.getProperty(RECORD_WRITER).evaluateAttributeExpressions()
        .asControllerService(RecordSetWriterFactory.class);
final RecordSchema schema = writerFactory.getSchema(null, null);
// Convert the Solr query results into a NiFi RecordSet using that schema.
final RecordSet recordSet = SolrUtils.solrDocumentsToRecordSet(response.getResults(), schema);
// Mutated from within the stream callback below, so it must be an effectively
// final mutable holder rather than a plain String.
final StringBuffer mimeType = new StringBuffer();
protected byte[] getData(final ReportingContext context, InputStream in, Map<String, String> attributes) {
    // Re-serializes the JSON metrics from 'in' using the configured Record
    // Writer, populating 'attributes' with MIME type and writer attributes.
    try (final JsonRecordReader reader = new JsonRecordReader(in, recordSchema)) {
        final RecordSetWriterFactory writerFactory =
                context.getProperty(RECORD_WRITER).asControllerService(RecordSetWriterFactory.class);
        final RecordSchema writeSchema = writerFactory.getSchema(null, recordSchema);

        final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (final RecordSetWriter writer = writerFactory.createWriter(getLogger(), writeSchema, buffer)) {
            writer.beginRecordSet();
            for (Record record = reader.nextRecord(); record != null; record = reader.nextRecord()) {
                writer.write(record);
            }
            final WriteResult writeResult = writer.finishRecordSet();
            attributes.put(CoreAttributes.MIME_TYPE.key(), writer.getMimeType());
            attributes.putAll(writeResult.getAttributes());
        }
        return buffer.toByteArray();
    } catch (IOException | SchemaNotFoundException | MalformedRecordException e) {
        throw new ProcessException("Failed to write metrics using record writer: " + e.getMessage(), e);
    }
}
private RecordSetWriter createIfNecessary(final RecordSetWriter writer, final RecordSetWriterFactory factory, final ProcessSession session,
                                          final FlowFile flowFile, final RecordSchema outputSchema) throws SchemaNotFoundException, IOException {
    // Reuse the existing writer when one was already opened for this FlowFile.
    if (writer != null) {
        return writer;
    }
    // Lazily open the FlowFile's content stream and begin a new record set.
    final OutputStream out = session.write(flowFile);
    final RecordSetWriter newWriter = factory.createWriter(getLogger(), outputSchema, out);
    newWriter.beginRecordSet();
    return newWriter;
}
// Determine the incoming record schema, then let the writer factory map it to
// the schema that will be written out.
final RecordSchema inputSchema = reader.getSchema();
readerSchema = recordSetWriterFactory.getSchema(originalAttributes, inputSchema);
} catch (final Exception e) {
    // Without a schema we cannot write records at all; log and route the
    // original FlowFile to failure (handled by code past this fragment).
    getLogger().error("Failed to determine Record Schema from {}; routing to failure", new Object[] {original, e});
@Override
public long writeResultSet(ResultSet resultSet, OutputStream outputStream, ComponentLog logger,
                           AbstractQueryDatabaseTable.MaxValueResultSetRowCollector callback) throws Exception {
    final RecordSet recordSet;
    try {
        if (fullRecordSet == null) {
            // First call: derive the record schema from the JDBC metadata (via
            // Avro) and resolve the write schema; both are cached for reuse.
            final Schema avroSchema = JdbcCommon.createSchema(resultSet, options);
            final RecordSchema recordAvroSchema = AvroTypeUtil.createSchema(avroSchema);
            fullRecordSet = new ResultSetRecordSetWithCallback(resultSet, recordAvroSchema, callback);
            writeSchema = recordSetWriterFactory.getSchema(originalAttributes, fullRecordSet.getSchema());
        }
        // Cap the rows written per FlowFile when a limit is configured.
        recordSet = maxRowsPerFlowFile > 0 ? fullRecordSet.limit(maxRowsPerFlowFile) : fullRecordSet;
    } catch (final SQLException | SchemaNotFoundException | IOException e) {
        throw new ProcessException(e);
    }

    try (final RecordSetWriter writer = recordSetWriterFactory.createWriter(logger, writeSchema, outputStream)) {
        writeResultRef.set(writer.write(recordSet));
        // The MIME type is identical for every chunk, so record it only once.
        if (mimeType == null) {
            mimeType = writer.getMimeType();
        }
        return writeResultRef.get().getRecordCount();
    } catch (final Exception e) {
        throw new IOException(e);
    }
}
@Override
public void process(final OutputStream out) throws IOException {
    // Write the Solr result records to the FlowFile content. The writer is
    // opened in try-with-resources so it is always closed (the original code
    // leaked it: createWriter() was called but close() never was, unlike every
    // sibling usage of RecordSetWriter in this codebase).
    try (final RecordSetWriter writer = writerFactory.createWriter(getLogger(), schema, out)) {
        writer.write(recordSet);
        writer.flush();
        // Record the output MIME type for the FlowFile's mime.type attribute.
        mimeType.append(writer.getMimeType());
    } catch (SchemaNotFoundException e) {
        throw new ProcessException("Could not parse Solr response", e);
    }
}
});
@Override
public RecordSchema getSchema(Map<String, String> variables, RecordSchema readSchema) throws SchemaNotFoundException, IOException {
    final RecordSetWriterFactory delegate = recordFactory.get();
    // Nothing configured yet — no schema to report.
    if (delegate == null) {
        return null;
    }
    try {
        return delegate.getSchema(variables, readSchema);
    } catch (UndeclaredThrowableException ute) {
        // Surface the proxied exception's real cause to the caller.
        throw new IOException(ute.getCause());
    }
}
}
@Override
public void process(final InputStream in) throws IOException {
    // Reads each incoming record, asks route() which relationship(s) it belongs
    // to, and appends it to a per-relationship FlowFile whose RecordSetWriter
    // is created lazily on first use.
    try (final RecordReader reader = readerFactory.createRecordReader(originalAttributes, in, getLogger())) {
        final RecordSchema writeSchema = writerFactory.getSchema(originalAttributes, reader.getSchema());
        Record record;
        while ((record = reader.nextRecord()) != null) {
            // Decide the destination relationship(s) for this record.
            final Set<Relationship> relationships = route(record, writeSchema, original, context, flowFileContext);
            numRecords.incrementAndGet();
            for (final Relationship relationship : relationships) {
                final RecordSetWriter recordSetWriter;
                Tuple<FlowFile, RecordSetWriter> tuple = writers.get(relationship);
                if (tuple == null) {
                    // First record for this relationship: create the output
                    // FlowFile and open a writer on its content stream.
                    // NOTE(review): the writers map is finalized/closed outside
                    // this fragment — confirm against the enclosing processor.
                    FlowFile outFlowFile = session.create(original);
                    final OutputStream out = session.write(outFlowFile);
                    recordSetWriter = writerFactory.createWriter(getLogger(), writeSchema, out);
                    recordSetWriter.beginRecordSet();
                    tuple = new Tuple<>(outFlowFile, recordSetWriter);
                    writers.put(relationship, tuple);
                } else {
                    recordSetWriter = tuple.getValue();
                }
                recordSetWriter.write(record);
            }
        }
    } catch (final SchemaNotFoundException | MalformedRecordException e) {
        throw new ProcessException("Could not parse incoming data", e);
    }
}
});
// Write the single record and capture any attributes the writer reports
// (e.g. schema identifiers) so they can be applied to the FlowFile.
try (final RecordSetWriter writer = writerFactory.createWriter(logger, schema, baos)) {
    final WriteResult writeResult = writer.write(record);
    additionalAttributes = writeResult.getAttributes();
// Look up the configured Record Writer service and obtain the write schema;
// nulls are passed because no variables or incoming schema exist here.
final RecordSetWriterFactory writerFactory = context.getProperty(RECORD_WRITER).evaluateAttributeExpressions()
        .asControllerService(RecordSetWriterFactory.class);
final RecordSchema schema = writerFactory.getSchema(null, null);
// Turn the Solr documents into a RecordSet using the resolved schema.
final RecordSet recordSet = SolrUtils.solrDocumentsToRecordSet(response.getResults(), schema);
// Appended to from inside the stream callback; needs to be an effectively
// final mutable holder, hence StringBuffer instead of a plain String.
final StringBuffer mimeType = new StringBuffer();
@Override
public void process(final InputStream in, final OutputStream out) throws IOException {
    // Stream records from 'in' through the processor's transform and write the
    // results to 'out', recording counts and MIME type in 'attributes'.
    try (final RecordReader reader = readerFactory.createRecordReader(originalAttributes, in, getLogger())) {
        final RecordSchema writeSchema = writerFactory.getSchema(originalAttributes, reader.getSchema());
        try (final RecordSetWriter writer = writerFactory.createWriter(getLogger(), writeSchema, out)) {
            writer.beginRecordSet();
            for (Record record = reader.nextRecord(); record != null; record = reader.nextRecord()) {
                // Apply the subclass's per-record transformation before writing.
                final Record processed = AbstractRecordProcessor.this.process(record, writeSchema, original, context);
                writer.write(processed);
            }
            final WriteResult writeResult = writer.finishRecordSet();
            attributes.put("record.count", String.valueOf(writeResult.getRecordCount()));
            attributes.put(CoreAttributes.MIME_TYPE.key(), writer.getMimeType());
            attributes.putAll(writeResult.getAttributes());
            recordCount.set(writeResult.getRecordCount());
        }
    } catch (final SchemaNotFoundException e) {
        throw new ProcessException(e.getLocalizedMessage(), e);
    } catch (final MalformedRecordException e) {
        throw new ProcessException("Could not parse incoming data", e);
    }
}
});
// Serialize the one record into 'baos', keeping the writer-reported attributes
// (e.g. schema references) to merge onto the FlowFile afterwards.
try (final RecordSetWriter writer = writerFactory.createWriter(logger, schema, baos)) {
    final WriteResult writeResult = writer.write(record);
    additionalAttributes = writeResult.getAttributes();
// Resolve the schema of the incoming records and map it through the writer
// factory to the schema that will be written.
final RecordSchema inputSchema = reader.getSchema();
readerSchema = recordSetWriterFactory.getSchema(originalAttributes, inputSchema);
} catch (final Exception e) {
    // Schema resolution failed — log and route the original FlowFile to
    // failure (the routing itself happens after this fragment).
    getLogger().error("Failed to determine Record Schema from {}; routing to failure", new Object[] {original, e});
// Fragment cut mid-construct: the stray ')' after createWriter(...) belongs to
// the enclosing try-with-resources header that is not visible in this view.
try (final RecordReader reader = readerFactory.createRecordReader(originalAttributes, in, getLogger())) {
    final RecordSchema schema = writerFactory.getSchema(originalAttributes, reader.getSchema());
    final RecordSetWriter writer = writerFactory.createWriter(getLogger(), schema, out)) {
        if (maxRecords == 1) {
            // Single-record mode: pull exactly one record from the pushback set.
            final Record record = pushbackSet.next();
// Write the record into the in-memory buffer; the writer may report extra
// attributes (e.g. schema info) that the caller applies to the FlowFile.
try (final RecordSetWriter writer = writerFactory.createWriter(logger, schema, baos)) {
    final WriteResult writeResult = writer.write(record);
    additionalAttributes = writeResult.getAttributes();
// Resolve the write schema for the consumed records (no variables available,
// hence the empty map).
writeSchema = writerFactory.getSchema(Collections.emptyMap(), recordSchema);
} catch (final Exception e) {
    // Schema lookup failed: log it; the enclosing code rolls back the consumed
    // Kafka offsets so the messages can be re-delivered.
    logger.error("Failed to obtain Schema for FlowFile. Will roll back the Kafka message offsets.", e);
// Open a writer over the raw FlowFile output stream and begin the record set.
// NOTE(review): 'writer' appears to be closed outside this fragment — confirm.
writer = writerFactory.createWriter(logger, writeSchema, rawOut);
writer.beginRecordSet();