/**
 * Re-serializes the JSON metrics on {@code in} through the configured Record Writer.
 * Populates {@code attributes} with the writer's MIME type and any attributes the
 * writer reports, and returns the serialized bytes.
 */
protected byte[] getData(final ReportingContext context, InputStream in, Map<String, String> attributes) {
    try (final JsonRecordReader reader = new JsonRecordReader(in, recordSchema)) {
        final RecordSetWriterFactory writerFactory = context.getProperty(RECORD_WRITER).asControllerService(RecordSetWriterFactory.class);
        final RecordSchema writeSchema = writerFactory.getSchema(null, recordSchema);

        final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        try (final RecordSetWriter writer = writerFactory.createWriter(getLogger(), writeSchema, buffer)) {
            writer.beginRecordSet();
            // Copy every record from the reader into the writer.
            for (Record current = reader.nextRecord(); current != null; current = reader.nextRecord()) {
                writer.write(current);
            }
            final WriteResult result = writer.finishRecordSet();

            attributes.put(CoreAttributes.MIME_TYPE.key(), writer.getMimeType());
            attributes.putAll(result.getAttributes());
        }
        return buffer.toByteArray();
    } catch (IOException | SchemaNotFoundException | MalformedRecordException e) {
        throw new ProcessException("Failed to write metrics using record writer: " + e.getMessage(), e);
    }
}
/**
 * Finalizes the record set on {@code writer}, stamps record-count and MIME-type
 * attributes onto the FlowFile, and transfers it to {@code relationship} with a
 * provenance ROUTE event carrying {@code details}.
 *
 * @throws IOException if finishing or closing the writer fails
 */
private void completeFlowFile(final ProcessSession session, final FlowFile flowFile, final RecordSetWriter writer, final Relationship relationship, final String details) throws IOException {
    final WriteResult writeResult = writer.finishRecordSet();
    writer.close();

    final Map<String, String> attributes = new HashMap<>(writeResult.getAttributes());
    attributes.put("record.count", String.valueOf(writeResult.getRecordCount()));
    attributes.put(CoreAttributes.MIME_TYPE.key(), writer.getMimeType());

    // FIX: ProcessSession.putAllAttributes returns a NEW FlowFile reference; the
    // original code discarded it and transferred the stale reference, so the
    // attributes set above were lost. All subsequent calls must use the updated ref.
    final FlowFile updated = session.putAllAttributes(flowFile, attributes);
    session.transfer(updated, relationship);
    session.getProvenanceReporter().route(updated, relationship, details);
}
// Callback invoked when the record was successfully handed off (per the error
// message below, by Druid). Partial view: the trailing "});" closes an enclosing
// anonymous class whose declaration is outside this span.
@Override
public void onSuccess(Object value) {
    log.debug(" FlowFile Processing Success: {}", new Object[]{value.toString()});
    try {
        // The success writer is shared across callbacks; serialize access to it.
        synchronized (successfulRecordWriter) {
            successfulRecordWriter.write(record);
            successfulRecordWriter.flush();
            successfulFlowFileCount.incrementAndGet();
        }
    } catch (final IOException ioe) {
        // NOTE(review): the message has no "{}" placeholder for the Object[] argument,
        // so ioe.getMessage() is likely never rendered — verify against the logger API.
        log.error("Error transferring record to success, this may result in data loss. " + "However the record was successfully processed by Druid", new Object[]{ioe.getMessage()}, ioe);
        recordWriteErrors.incrementAndGet();
    }
} });
/**
 * Writes a record set containing zero records to {@code outputStream} so consumers
 * still receive well-formed (header-only) output, and captures the writer's MIME type.
 *
 * @throws IOException if the writer cannot be created or fails while writing
 */
@Override
public void writeEmptyResultSet(OutputStream outputStream, ComponentLog logger) throws IOException {
    try (final RecordSetWriter writer = recordSetWriterFactory.createWriter(logger, writeSchema, outputStream)) {
        writer.beginRecordSet();
        writer.finishRecordSet();
        mimeType = writer.getMimeType();
    } catch (final Exception e) {
        // Normalize any failure (schema lookup, serialization) to IOException.
        throw new IOException(e);
    }
}
/**
 * Writes {@code recordSet} to the FlowFile's output stream using the configured
 * writer factory and records the writer's MIME type.
 * Partial view: the trailing "});" closes an enclosing anonymous class.
 */
@Override
public void process(final OutputStream out) throws IOException {
    // FIX: the original created the writer outside try-with-resources and never
    // closed it, leaking the writer. The equivalent Solr-response callback
    // elsewhere in this file already uses try-with-resources; match it.
    try (final RecordSetWriter writer = writerFactory.createWriter(getLogger(), schema, out)) {
        writer.write(recordSet);
        writer.flush();
        mimeType.append(writer.getMimeType());
    } catch (SchemaNotFoundException e) {
        throw new ProcessException("Could not parse Solr response", e);
    }
} });
// Fragment: writes a single record as its own record set and updates counters.
writer.beginRecordSet();
writer.write(record);
tracker.incrementRecordCount(1L);
// Third arg false: presumably the counter adjustment is deferred until session
// commit rather than applied immediately — confirm against ProcessSession.adjustCounter.
session.adjustCounter("Records Received", 1, false);
writer.close();
// Streams each record from the incoming FlowFile, routes it to zero or more
// relationships, and appends it to a per-relationship output FlowFile whose
// writer is created lazily on first use and cached in the "writers" map.
// Partial view: the trailing "});" closes an enclosing anonymous class.
@Override
public void process(final InputStream in) throws IOException {
    try (final RecordReader reader = readerFactory.createRecordReader(originalAttributes, in, getLogger())) {
        final RecordSchema writeSchema = writerFactory.getSchema(originalAttributes, reader.getSchema());
        Record record;
        while ((record = reader.nextRecord()) != null) {
            // A record may match several relationships; it is written to each one.
            final Set<Relationship> relationships = route(record, writeSchema, original, context, flowFileContext);
            numRecords.incrementAndGet();
            for (final Relationship relationship : relationships) {
                final RecordSetWriter recordSetWriter;
                Tuple<FlowFile, RecordSetWriter> tuple = writers.get(relationship);
                if (tuple == null) {
                    // First record for this relationship: create the output FlowFile
                    // and its writer, then cache both for subsequent records.
                    FlowFile outFlowFile = session.create(original);
                    final OutputStream out = session.write(outFlowFile);
                    recordSetWriter = writerFactory.createWriter(getLogger(), writeSchema, out);
                    recordSetWriter.beginRecordSet();
                    tuple = new Tuple<>(outFlowFile, recordSetWriter);
                    writers.put(relationship, tuple);
                } else {
                    recordSetWriter = tuple.getValue();
                }
                recordSetWriter.write(record);
            }
        }
    } catch (final SchemaNotFoundException | MalformedRecordException e) {
        throw new ProcessException("Could not parse incoming data", e);
    }
} });
// Serializes the JDBC ResultSet "rs" to the FlowFile's output stream via the
// record writer; results are communicated back through writeResultRef/mimeTypeRef.
// Partial view: the trailing "});" closes an enclosing anonymous class.
@Override
public void process(final OutputStream out) throws IOException {
    final ResultSetRecordSet recordSet;
    final RecordSchema writeSchema;
    try {
        // Wrap the ResultSet as a RecordSet and derive the write schema from the
        // reader schema plus the FlowFile's original attributes.
        recordSet = new ResultSetRecordSet(rs, readerSchema);
        final RecordSchema resultSetSchema = recordSet.getSchema();
        writeSchema = recordSetWriterFactory.getSchema(originalAttributes, resultSetSchema);
    } catch (final SQLException | SchemaNotFoundException e) {
        throw new ProcessException(e);
    }
    try (final RecordSetWriter resultSetWriter = recordSetWriterFactory.createWriter(getLogger(), writeSchema, out)) {
        // Publish the write result and MIME type for the caller via the shared refs.
        writeResultRef.set(resultSetWriter.write(recordSet));
        mimeTypeRef.set(resultSetWriter.getMimeType());
    } catch (final Exception e) {
        throw new IOException(e);
    }
} });
// NOTE(review): this span appears to stitch together writer setup and three
// separate try/catch closers whose opening "try {" lines are outside this view;
// the brace structure below is intentionally partial.
final RecordSchema outSchema = writerFactory.getSchema(attributes, reader.getSchema());
// One writer per outcome: dropped, failed, and successful records.
droppedRecordWriter = writerFactory.createWriter(log, outSchema, droppedOutputStream);
droppedRecordWriter.beginRecordSet();
failedRecordWriter = writerFactory.createWriter(log, outSchema, failedOutputStream);
failedRecordWriter.beginRecordSet();
successfulRecordWriter = writerFactory.createWriter(log, outSchema, successfulOutputStream);
successfulRecordWriter.beginRecordSet();
droppedRecordWriter.finishRecordSet();
droppedRecordWriter.close();
} catch (IOException ioe) {
log.error("Error closing FlowFile with dropped records: {}", new Object[]{ioe.getMessage()}, ioe);
failedRecordWriter.finishRecordSet();
failedRecordWriter.close();
} catch (IOException ioe) {
log.error("Error closing FlowFile with failed records: {}", new Object[]{ioe.getMessage()}, ioe);
successfulRecordWriter.finishRecordSet();
successfulRecordWriter.close();
} catch (IOException ioe) {
log.error("Error closing FlowFile with successful records: {}", new Object[]{ioe.getMessage()}, ioe);
/**
 * Best-effort close of {@code writer}; a {@code null} writer is a no-op and any
 * failure is logged at WARN rather than propagated.
 */
private void closeWriter(final RecordSetWriter writer) {
    if (writer == null) {
        return;
    }
    try {
        writer.close();
    } catch (final Exception e) {
        logger.warn("Failed to close Record Writer", e);
    }
}
// Fragment of an if/else whose condition is outside this view: when the writer
// implements RawRecordWriter, write the record in raw form; otherwise use the
// standard schema-driven write path.
((RawRecordWriter) writer).writeRawRecord(record);
} else {
writer.write(record);
/**
 * Returns {@code writer} if already created; otherwise opens the FlowFile's
 * output stream, creates a writer for {@code outputSchema}, begins its record
 * set, and returns it.
 *
 * @throws SchemaNotFoundException if the factory cannot resolve the schema
 * @throws IOException if the writer cannot be created or started
 */
private RecordSetWriter createIfNecessary(final RecordSetWriter writer, final RecordSetWriterFactory factory, final ProcessSession session, final FlowFile flowFile, final RecordSchema outputSchema) throws SchemaNotFoundException, IOException {
    if (writer != null) {
        return writer;
    }

    final OutputStream out = session.write(flowFile);
    try {
        final RecordSetWriter created = factory.createWriter(getLogger(), outputSchema, out);
        created.beginRecordSet();
        return created;
    } catch (final SchemaNotFoundException | IOException e) {
        // FIX: the original leaked the session's OutputStream when writer creation
        // or beginRecordSet failed; close it best-effort before rethrowing.
        try {
            out.close();
        } catch (final IOException ignored) {
            // suppressed: the original exception is the informative one
        }
        throw e;
    }
}
// Fragment: writes a single record as a one-element record set and counts it.
recordWriter.beginRecordSet();
recordWriter.write(record);
recordCount++;
recordWriter.close();
// Fragment: write the record, and on any runtime failure route the raw Kafka
// message to parse-failure handling. The string literal at the end continues
// past this view (the argument list is truncated here).
writer.beginRecordSet();
writer.write(record);
} catch (final RuntimeException re) {
handleParseFailure(consumerRecord, session, re, "Failed to write message from Kafka using the configured Record Writer. "
// Fragment (lambda body continues past this view): rewrites the response
// FlowFile's content with the record set; the writer is closed by
// try-with-resources.
flowFileResponse = session.write(flowFileResponse, out -> {
try (final RecordSetWriter writer = writerFactory.createWriter(getLogger(), schema, out)) {
writer.write(recordSet);
writer.flush();
// Presumably captured for the mime.type attribute — confirm against the caller.
mimeType.append(writer.getMimeType());
} catch (SchemaNotFoundException e) {
throw new ProcessException("Could not parse Solr response", e);
// Streams the JDBC ResultSet as records to outputStream, honoring the
// maxRowsPerFlowFile cap, and returns the number of records written.
// Stateful across calls: fullRecordSet/writeSchema/mimeType fields are
// initialized on first invocation and reused afterwards.
@Override
public long writeResultSet(ResultSet resultSet, OutputStream outputStream, ComponentLog logger, AbstractQueryDatabaseTable.MaxValueResultSetRowCollector callback) throws Exception {
    final RecordSet recordSet;
    try {
        if (fullRecordSet == null) {
            // First call: derive the record schema from the ResultSet metadata
            // via an intermediate Avro schema, and wrap the ResultSet with the
            // max-value collector callback.
            final Schema avroSchema = JdbcCommon.createSchema(resultSet, options);
            final RecordSchema recordAvroSchema = AvroTypeUtil.createSchema(avroSchema);
            fullRecordSet = new ResultSetRecordSetWithCallback(resultSet, recordAvroSchema, callback);
            writeSchema = recordSetWriterFactory.getSchema(originalAttributes, fullRecordSet.getSchema());
        }
        // A positive limit caps the rows emitted into this FlowFile.
        recordSet = (maxRowsPerFlowFile > 0) ? fullRecordSet.limit(maxRowsPerFlowFile) : fullRecordSet;
    } catch (final SQLException | SchemaNotFoundException | IOException e) {
        throw new ProcessException(e);
    }
    try (final RecordSetWriter resultSetWriter = recordSetWriterFactory.createWriter(logger, writeSchema, outputStream)) {
        writeResultRef.set(resultSetWriter.write(recordSet));
        if (mimeType == null) {
            mimeType = resultSetWriter.getMimeType();
        }
        return writeResultRef.get().getRecordCount();
    } catch (final Exception e) {
        throw new IOException(e);
    }
}
/**
 * Writes a record set containing zero records to {@code outputStream} so consumers
 * still receive well-formed (header-only) output, and captures the writer's MIME type.
 *
 * @throws IOException if the writer cannot be created or fails while writing
 */
@Override
public void writeEmptyResultSet(OutputStream outputStream, ComponentLog logger) throws IOException {
    try (final RecordSetWriter writer = recordSetWriterFactory.createWriter(logger, writeSchema, outputStream)) {
        writer.beginRecordSet();
        writer.finishRecordSet();
        mimeType = writer.getMimeType();
    } catch (final Exception e) {
        // Normalize any failure (schema lookup, serialization) to IOException.
        throw new IOException(e);
    }
}
// NOTE(review): this span appears to stitch together writer setup and three
// separate try/catch closers whose opening "try {" lines are outside this view;
// the brace structure below is intentionally partial.
final RecordSchema outSchema = writerFactory.getSchema(attributes, reader.getSchema());
// One writer per outcome: dropped, failed, and successful records.
droppedRecordWriter = writerFactory.createWriter(log, outSchema, droppedOutputStream);
droppedRecordWriter.beginRecordSet();
failedRecordWriter = writerFactory.createWriter(log, outSchema, failedOutputStream);
failedRecordWriter.beginRecordSet();
successfulRecordWriter = writerFactory.createWriter(log, outSchema, successfulOutputStream);
successfulRecordWriter.beginRecordSet();
droppedRecordWriter.finishRecordSet();
droppedRecordWriter.close();
} catch (IOException ioe) {
log.error("Error closing FlowFile with dropped records: {}", new Object[]{ioe.getMessage()}, ioe);
failedRecordWriter.finishRecordSet();
failedRecordWriter.close();
} catch (IOException ioe) {
log.error("Error closing FlowFile with failed records: {}", new Object[]{ioe.getMessage()}, ioe);
successfulRecordWriter.finishRecordSet();
successfulRecordWriter.close();
} catch (IOException ioe) {
log.error("Error closing FlowFile with successful records: {}", new Object[]{ioe.getMessage()}, ioe);
/**
 * Best-effort close of {@code writer}; a {@code null} writer is a no-op and any
 * failure is logged at WARN rather than propagated.
 */
private void closeWriter(final RecordSetWriter writer) {
    if (writer == null) {
        return;
    }
    try {
        writer.close();
    } catch (final Exception e) {
        logger.warn("Failed to close Record Writer", e);
    }
}
// Fragment of an if/else whose condition is outside this view: when the writer
// implements RawRecordWriter, write the record in raw form; otherwise use the
// standard schema-driven write path.
((RawRecordWriter) writer).writeRawRecord(record);
} else {
writer.write(record);