@Override public Map<String, String> getAttributesToAdd() { Map<String, String> attributesToAdd = new HashMap<>(); attributesToAdd.put(CoreAttributes.MIME_TYPE.key(), mimeType); // Add any attributes from the record writer (if present) final WriteResult result = writeResultRef.get(); if (result != null) { if (result.getAttributes() != null) { attributesToAdd.putAll(result.getAttributes()); } attributesToAdd.put("record.count", String.valueOf(result.getRecordCount())); } return attributesToAdd; }
@Override
public WriteResult finishRecordSet() throws IOException {
    // No writer-specific attributes to report here: surface only the number of
    // records written by this (anonymous) writer instance.
    return WriteResult.of(recordCount, Collections.emptyMap());
}
}; // end of anonymous writer implementation — its declaration begins outside this view
/**
 * Reads JSON records from the given input stream and re-serializes them with the
 * configured Record Writer, returning the serialized bytes.
 *
 * @param context    reporting context used to resolve the RECORD_WRITER controller service
 * @param in         stream of JSON records conforming to {@code recordSchema}
 * @param attributes mutable map that receives the writer's MIME type and any
 *                   attributes produced by the write (side effect)
 * @return the serialized record set as a byte array
 * @throws ProcessException if reading, schema resolution, or writing fails
 */
protected byte[] getData(final ReportingContext context, InputStream in, Map<String, String> attributes) {
    try (final JsonRecordReader reader = new JsonRecordReader(in, recordSchema)) {
        final RecordSetWriterFactory writerFactory = context.getProperty(RECORD_WRITER).asControllerService(RecordSetWriterFactory.class);
        // null FlowFile attributes: schema lookup here presumably does not depend on them — TODO confirm
        final RecordSchema writeSchema = writerFactory.getSchema(null, recordSchema);
        final ByteArrayOutputStream out = new ByteArrayOutputStream();
        try (final RecordSetWriter writer = writerFactory.createWriter(getLogger(), writeSchema, out)) {
            writer.beginRecordSet();
            Record record;
            // Copy every record from the reader straight through to the writer
            while ((record = reader.nextRecord()) != null) {
                writer.write(record);
            }
            final WriteResult writeResult = writer.finishRecordSet();
            // Expose the writer's MIME type and writer-supplied attributes to the caller
            attributes.put(CoreAttributes.MIME_TYPE.key(), writer.getMimeType());
            attributes.putAll(writeResult.getAttributes());
        }
        return out.toByteArray();
    } catch (IOException | SchemaNotFoundException | MalformedRecordException e) {
        throw new ProcessException("Failed to write metrics using record writer: " + e.getMessage(), e);
    }
}
@Override
public void updateCounters(ProcessSession session) {
    // Bump the session-level counter only once a write result is available.
    final WriteResult writeResult = writeResultRef.get();
    if (writeResult == null) {
        return;
    }
    session.adjustCounter("Records Written", writeResult.getRecordCount(), false);
}
// Serialize the single record through a writer bound to the in-memory buffer;
// capture any attributes the writer reports, then flush before the buffer is read.
// NOTE(review): fragment continues beyond this view — the try block is closed elsewhere.
try (final RecordSetWriter writer = writerFactory.createWriter(logger, schema, baos)) {
    final WriteResult writeResult = writer.write(record);
    additionalAttributes = writeResult.getAttributes();
    writer.flush();
/**
 * Streams rows from the JDBC ResultSet through the configured record writer.
 * On the first call, infers an Avro-derived record schema from the ResultSet
 * metadata and caches both the record set and the write schema for reuse.
 *
 * @return the number of records written for this invocation
 * @throws ProcessException if schema derivation or record-set setup fails
 * @throws IOException      if writing the records fails (wraps the root cause)
 */
@Override
public long writeResultSet(ResultSet resultSet, OutputStream outputStream, ComponentLog logger, AbstractQueryDatabaseTable.MaxValueResultSetRowCollector callback) throws Exception {
    final RecordSet recordSet;
    try {
        if (fullRecordSet == null) {
            // First invocation: derive the schema from the JDBC metadata
            final Schema avroSchema = JdbcCommon.createSchema(resultSet, options);
            final RecordSchema recordAvroSchema = AvroTypeUtil.createSchema(avroSchema);
            fullRecordSet = new ResultSetRecordSetWithCallback(resultSet, recordAvroSchema, callback);
            writeSchema = recordSetWriterFactory.getSchema(originalAttributes, fullRecordSet.getSchema());
        }
        // Cap rows per FlowFile only when a positive limit is configured
        recordSet = (maxRowsPerFlowFile > 0) ? fullRecordSet.limit(maxRowsPerFlowFile) : fullRecordSet;
    } catch (final SQLException | SchemaNotFoundException | IOException e) {
        throw new ProcessException(e);
    }
    try (final RecordSetWriter resultSetWriter = recordSetWriterFactory.createWriter(logger, writeSchema, outputStream)) {
        writeResultRef.set(resultSetWriter.write(recordSet));
        // Remember the MIME type from the first writer; subsequent calls reuse it
        if (mimeType == null) {
            mimeType = resultSetWriter.getMimeType();
        }
        return writeResultRef.get().getRecordCount();
    } catch (final Exception e) {
        // Surface any write-time failure as an IOException, preserving the cause
        throw new IOException(e);
    }
}
/**
 * Finishes the record set, closes the writer, stamps the write result's
 * attributes (plus record count and MIME type) onto the FlowFile, and
 * transfers it to the given relationship with a provenance ROUTE event.
 *
 * @param session      the session owning the FlowFile
 * @param flowFile     the FlowFile being completed
 * @param writer       the record writer to finish and close
 * @param relationship destination relationship
 * @param details      provenance event details
 * @throws IOException if finishing or closing the writer fails
 */
private void completeFlowFile(final ProcessSession session, final FlowFile flowFile, final RecordSetWriter writer,
        final Relationship relationship, final String details) throws IOException {
    final WriteResult writeResult;
    try {
        writeResult = writer.finishRecordSet();
    } finally {
        // Close even when finishRecordSet() throws, so the writer never leaks
        writer.close();
    }

    final Map<String, String> attributes = new HashMap<>();
    attributes.putAll(writeResult.getAttributes());
    attributes.put("record.count", String.valueOf(writeResult.getRecordCount()));
    attributes.put(CoreAttributes.MIME_TYPE.key(), writer.getMimeType());

    // BUG FIX: FlowFiles are immutable — putAllAttributes returns a new version,
    // and transferring the stale reference is rejected by the session. Use the
    // returned FlowFile for the transfer and the provenance event.
    final FlowFile updated = session.putAllAttributes(flowFile, attributes);
    session.transfer(updated, relationship);
    session.getProvenanceReporter().route(updated, relationship, details);
}
// Serialize the single record through a writer bound to the in-memory buffer;
// capture any attributes the writer reports, then flush before the buffer is read.
// NOTE(review): fragment continues beyond this view — the try block is closed elsewhere.
try (final RecordSetWriter writer = writerFactory.createWriter(logger, schema, baos)) {
    final WriteResult writeResult = writer.write(record);
    additionalAttributes = writeResult.getAttributes();
    writer.flush();
@Override
public final WriteResult write(final Record record) throws IOException {
    // Delegate serialization to the subclass hook, then report the running total.
    final Map<String, String> writerAttributes = writeRecord(record);
    recordCount++;
    return WriteResult.of(recordCount, writerAttributes);
}
@Override
public void updateCounters(ProcessSession session) {
    // Nothing to count until a write result exists.
    final WriteResult latestResult = writeResultRef.get();
    if (latestResult == null) {
        return;
    }
    session.adjustCounter("Records Written", latestResult.getRecordCount(), false);
}
private boolean processBundle(final BundleTracker bundle) throws IOException {
    // Finish and close the bundle's writer, if one exists. Empty bundles are
    // dropped; otherwise the write result's attributes and the writer's MIME
    // type are stamped onto the bundle's FlowFile.
    final RecordSetWriter recordWriter = bundle.recordWriter;
    if (recordWriter != null) {
        final WriteResult writeResult;
        try {
            writeResult = recordWriter.finishRecordSet();
        } finally {
            // Always close, even if finishing the record set fails
            recordWriter.close();
        }

        if (writeResult.getRecordCount() == 0) {
            // No records were written: discard rather than emit an empty FlowFile
            getProcessSession().remove(bundle.flowFile);
            return false;
        }

        final Map<String, String> newAttributes = new HashMap<>(writeResult.getAttributes());
        newAttributes.put(CoreAttributes.MIME_TYPE.key(), recordWriter.getMimeType());
        bundle.flowFile = getProcessSession().putAllAttributes(bundle.flowFile, newAttributes);
    }

    populateAttributes(bundle);
    return true;
}
try (final RecordSetWriter writer = writerFactory.createWriter(logger, schema, baos)) { final WriteResult writeResult = writer.write(record); additionalAttributes = writeResult.getAttributes(); writer.flush();
@Override
public final WriteResult finishRecordSet() throws IOException {
    // Guard: finishing without a matching beginRecordSet() is a usage error.
    if (!isActiveRecordSet()) {
        throw new IllegalStateException("Cannot finish RecordSet because no RecordSet has begun");
    }

    // Let the subclass contribute final attributes; treat null as "none".
    final Map<String, String> finishAttributes = onFinishRecordSet();
    if (finishAttributes == null) {
        return WriteResult.of(recordCount, Collections.emptyMap());
    }
    return WriteResult.of(recordCount, finishAttributes);
}
/**
 * Streams rows from the JDBC ResultSet through the configured record writer.
 * On the first call, infers an Avro-derived record schema from the ResultSet
 * metadata and caches both the record set and the write schema for reuse.
 *
 * @return the number of records written for this invocation
 * @throws ProcessException if schema derivation or record-set setup fails
 * @throws IOException      if writing the records fails (wraps the root cause)
 */
@Override
public long writeResultSet(ResultSet resultSet, OutputStream outputStream, ComponentLog logger, AbstractQueryDatabaseTable.MaxValueResultSetRowCollector callback) throws Exception {
    final RecordSet recordSet;
    try {
        if (fullRecordSet == null) {
            // First invocation: derive the schema from the JDBC metadata
            final Schema avroSchema = JdbcCommon.createSchema(resultSet, options);
            final RecordSchema recordAvroSchema = AvroTypeUtil.createSchema(avroSchema);
            fullRecordSet = new ResultSetRecordSetWithCallback(resultSet, recordAvroSchema, callback);
            writeSchema = recordSetWriterFactory.getSchema(originalAttributes, fullRecordSet.getSchema());
        }
        // Cap rows per FlowFile only when a positive limit is configured
        recordSet = (maxRowsPerFlowFile > 0) ? fullRecordSet.limit(maxRowsPerFlowFile) : fullRecordSet;
    } catch (final SQLException | SchemaNotFoundException | IOException e) {
        throw new ProcessException(e);
    }
    try (final RecordSetWriter resultSetWriter = recordSetWriterFactory.createWriter(logger, writeSchema, outputStream)) {
        writeResultRef.set(resultSetWriter.write(recordSet));
        // Remember the MIME type from the first writer; subsequent calls reuse it
        if (mimeType == null) {
            mimeType = resultSetWriter.getMimeType();
        }
        return writeResultRef.get().getRecordCount();
    } catch (final Exception e) {
        // Surface any write-time failure as an IOException, preserving the cause
        throw new IOException(e);
    }
}
private boolean processBundle(final BundleTracker bundle) throws IOException {
    // Finish and close the bundle's writer, if one exists. Empty bundles are
    // dropped; otherwise the write result's attributes and the writer's MIME
    // type are stamped onto the bundle's FlowFile.
    final RecordSetWriter recordWriter = bundle.recordWriter;
    if (recordWriter != null) {
        final WriteResult writeResult;
        try {
            writeResult = recordWriter.finishRecordSet();
        } finally {
            // Always close, even if finishing the record set fails
            recordWriter.close();
        }

        if (writeResult.getRecordCount() == 0) {
            // No records were written: discard rather than emit an empty FlowFile
            getProcessSession().remove(bundle.flowFile);
            return false;
        }

        final Map<String, String> newAttributes = new HashMap<>(writeResult.getAttributes());
        newAttributes.put(CoreAttributes.MIME_TYPE.key(), recordWriter.getMimeType());
        bundle.flowFile = getProcessSession().putAllAttributes(bundle.flowFile, newAttributes);
    }

    populateAttributes(bundle);
    return true;
}
/**
 * Reads JSON records from the given input stream and re-serializes them with the
 * configured Record Writer, returning the serialized bytes.
 *
 * @param context    reporting context used to resolve the RECORD_WRITER controller service
 * @param in         stream of JSON records conforming to {@code recordSchema}
 * @param attributes mutable map that receives the writer's MIME type and any
 *                   attributes produced by the write (side effect)
 * @return the serialized record set as a byte array
 * @throws ProcessException if reading, schema resolution, or writing fails
 */
protected byte[] getData(final ReportingContext context, InputStream in, Map<String, String> attributes) {
    try (final JsonRecordReader reader = new JsonRecordReader(in, recordSchema)) {
        final RecordSetWriterFactory writerFactory = context.getProperty(RECORD_WRITER).asControllerService(RecordSetWriterFactory.class);
        // null FlowFile attributes: schema lookup here presumably does not depend on them — TODO confirm
        final RecordSchema writeSchema = writerFactory.getSchema(null, recordSchema);
        final ByteArrayOutputStream out = new ByteArrayOutputStream();
        try (final RecordSetWriter writer = writerFactory.createWriter(getLogger(), writeSchema, out)) {
            writer.beginRecordSet();
            Record record;
            // Copy every record from the reader straight through to the writer
            while ((record = reader.nextRecord()) != null) {
                writer.write(record);
            }
            final WriteResult writeResult = writer.finishRecordSet();
            // Expose the writer's MIME type and writer-supplied attributes to the caller
            attributes.put(CoreAttributes.MIME_TYPE.key(), writer.getMimeType());
            attributes.putAll(writeResult.getAttributes());
        }
        return out.toByteArray();
    } catch (IOException | SchemaNotFoundException | MalformedRecordException e) {
        throw new ProcessException("Failed to write metrics using record writer: " + e.getMessage(), e);
    }
}
/**
 * Writes every Record in the given RecordSet, in order, via {@link #write(Record)}.
 *
 * @param recordSet the RecordSet to write
 * @return the result of writing the record set
 * @throws IOException if an I/O error happens reading from the RecordSet, or writing a Record
 */
default WriteResult write(final RecordSet recordSet) throws IOException {
    int written = 0;
    // Drain the record set; WriteResult carries only the count, no attributes.
    for (Record record = recordSet.next(); record != null; record = recordSet.next()) {
        write(record);
        written++;
    }
    return WriteResult.of(written, Collections.emptyMap());
}
private boolean processBundle(final BundleTracker bundle) throws IOException {
    // Finish and close the bundle's writer, if one exists. Empty bundles are
    // dropped; otherwise the write result's attributes and the writer's MIME
    // type are stamped onto the bundle's FlowFile.
    final RecordSetWriter recordWriter = bundle.recordWriter;
    if (recordWriter != null) {
        final WriteResult writeResult;
        try {
            writeResult = recordWriter.finishRecordSet();
        } finally {
            // Always close, even if finishing the record set fails
            recordWriter.close();
        }

        if (writeResult.getRecordCount() == 0) {
            // No records were written: discard rather than emit an empty FlowFile
            getProcessSession().remove(bundle.flowFile);
            return false;
        }

        final Map<String, String> newAttributes = new HashMap<>(writeResult.getAttributes());
        newAttributes.put(CoreAttributes.MIME_TYPE.key(), recordWriter.getMimeType());
        bundle.flowFile = getProcessSession().putAllAttributes(bundle.flowFile, newAttributes);
    }

    populateAttributes(bundle);
    return true;
}
// Serialize the single record through a writer bound to the in-memory buffer;
// capture any attributes the writer reports, then flush before the buffer is read.
// NOTE(review): fragment continues beyond this view — the try block is closed elsewhere.
try (final RecordSetWriter writer = writerFactory.createWriter(logger, schema, baos)) {
    final WriteResult writeResult = writer.write(record);
    additionalAttributes = writeResult.getAttributes();
    writer.flush();
/**
 * Writes a single Record as one CSV line, optionally quoting values and
 * emitting a header line before the first record. Intended for unit tests:
 * can be configured to throw after {@code failAfterN} records.
 *
 * @param record the record to serialize
 * @return a per-record WriteResult (count of 1, no attributes)
 * @throws IOException when the configured failure threshold is exceeded, or on write failure
 */
@Override
public WriteResult write(Record record) throws IOException {
    // Simulate a writer failure after the configured number of records (-1 disables).
    if (++recordCount > failAfterN && failAfterN > -1) {
        throw new IOException("Unit Test intentionally throwing IOException after " + failAfterN + " records were written");
    }

    // FIX: use an explicit charset — bare getBytes() depends on the platform
    // default (pre-Java-18), making output environment-dependent.
    final java.nio.charset.Charset charset = java.nio.charset.StandardCharsets.UTF_8;

    // Emit the optional header exactly once, before the first record.
    if (header != null && !headerWritten) {
        out.write(header.getBytes(charset));
        out.write("\n".getBytes(charset));
        headerWritten = true;
    }

    final int numCols = record.getSchema().getFieldCount();
    int i = 0;
    for (final String fieldName : record.getSchema().getFieldNames()) {
        final String val = record.getAsString(fieldName);
        if (val != null) {
            if (quoteValues) {
                out.write("\"".getBytes(charset));
                out.write(val.getBytes(charset));
                out.write("\"".getBytes(charset));
            } else {
                out.write(val.getBytes(charset));
            }
        }
        // Comma between columns, none after the last
        if (i++ < numCols - 1) {
            out.write(",".getBytes(charset));
        }
    }
    out.write("\n".getBytes(charset));

    return WriteResult.of(1, Collections.emptyMap());
}