// Content-Length for the outgoing request: -1 signals "unknown length"
// (chunked transfer encoding); otherwise report the FlowFile's exact byte size.
@Override public long contentLength(){ return useChunked ? -1 : requestFlowFile.getSize(); } };
// Size in bytes of the wrapped FlowFile's content; delegates straight through.
@Override public long getSize() { return flowFile.getSize(); }
/**
 * Check if the flow file is corrupted: either its content is empty or the
 * cache key attribute is missing.
 *
 * @param flowFile the flow file to check
 * @param key the cache key
 * @return <code>true</code> if the flow file is incomplete
 */
private boolean isFailedFlowFile(FlowFile flowFile, String key) {
    // A missing/blank key or zero-byte content both mark the flow file as failed.
    return StringUtils.isEmpty(key) || flowFile.getSize() == 0;
}
protected boolean checkMinimumAlgorithmRequirements(String algorithm, FlowFile flowFile) { // Check if content matches minimum length requirement if (algorithm.equals(tlsh) && flowFile.getSize() < 512 ) { return false; } else { return true; } }
/**
 * Stamps the standard fragment/segment attributes onto a split FlowFile.
 *
 * @param processSession    session used to write the attributes
 * @param splitFlowFile     the split to update
 * @param splitLineCount    number of lines contained in this split
 * @param splitFlowFileSize size of this split in bytes (unused; size is read from the FlowFile)
 * @param splitId           fragment identifier shared by all splits of the original
 * @param splitIndex        zero-based position of this split among its siblings
 * @param origFileName      filename of the original, pre-split FlowFile
 * @return the updated FlowFile returned by the session
 */
private FlowFile updateAttributes(ProcessSession processSession, FlowFile splitFlowFile, long splitLineCount,
        long splitFlowFileSize, String splitId, int splitIndex, String origFileName) {
    final Map<String, String> attrs = new HashMap<>(8);
    attrs.put(SPLIT_LINE_COUNT, Long.toString(splitLineCount));
    attrs.put(FRAGMENT_SIZE, Long.toString(splitFlowFile.getSize()));
    attrs.put(FRAGMENT_ID, splitId);
    attrs.put(FRAGMENT_INDEX, Integer.toString(splitIndex));
    attrs.put(SEGMENT_ORIGINAL_FILENAME, origFileName);
    return processSession.putAllAttributes(splitFlowFile, attrs);
}
/**
 * Extracts the full content of the {@link FlowFile} into a byte array.
 *
 * @param flowFile       the FlowFile whose content is read
 * @param processSession session used to open the content stream
 * @return the FlowFile's content as a byte array
 */
private byte[] extractMessage(FlowFile flowFile, ProcessSession processSession) {
    // NOTE: the int cast assumes the content fits in a Java array (< 2 GB).
    final byte[] messageContent = new byte[(int) flowFile.getSize()];
    // Lambda replaces the verbose anonymous InputStreamCallback, matching the
    // lambda style used by the other read callbacks in this file.
    // ensureFullBuffer=true: fail if the stream ends before the buffer is filled.
    processSession.read(flowFile, in -> StreamUtils.fillBuffer(in, messageContent, true));
    return messageContent;
}
/**
 * Extracts the full content of the {@link FlowFile} as a byte array.
 *
 * @param flowFile the FlowFile whose content is read
 * @param session  session used to open the content stream
 * @return the FlowFile's content as a byte array
 */
private byte[] extractMessage(FlowFile flowFile, ProcessSession session) {
    // NOTE: the int cast assumes the content fits in a Java array (< 2 GB).
    final byte[] messageContent = new byte[(int) flowFile.getSize()];
    // Lambda replaces the verbose anonymous InputStreamCallback, matching the
    // lambda style used by the other read callbacks in this file.
    // ensureFullBuffer=true: fail if the stream ends before the buffer is filled.
    session.read(flowFile, in -> StreamUtils.fillBuffer(in, messageContent, true));
    return messageContent;
}
/**
 * Renders the FlowFile's standard metadata and all of its attributes as a
 * human-readable, multi-line string appended to the given prefix.
 *
 * @param flowFile       the FlowFile to describe
 * @param messagePrepend text placed at the start of the rendered message
 * @return the formatted description
 */
private static String formatAttributes(final FlowFile flowFile, final String messagePrepend) {
    final StringBuilder sb = new StringBuilder(messagePrepend);
    sb.append(BODY_SEPARATOR)
            .append("\nStandard FlowFile Metadata:")
            .append(String.format("\n\t%1$s = '%2$s'", "id", flowFile.getAttribute(CoreAttributes.UUID.key())))
            .append(String.format("\n\t%1$s = '%2$s'", "entryDate", new Date(flowFile.getEntryDate())))
            .append(String.format("\n\t%1$s = '%2$s'", "fileSize", flowFile.getSize()))
            .append("\nFlowFile Attributes:");
    // One indented line per user-visible attribute.
    for (Entry<String, String> attribute : flowFile.getAttributes().entrySet()) {
        sb.append(String.format("\n\t%1$s = '%2$s'", attribute.getKey(), attribute.getValue()));
    }
    return sb.append("\n").toString();
}
/**
 * Snapshots the FlowFile's repository-level properties (id, size, and the
 * various timestamps) into a string-valued map.
 *
 * @param flowFile the FlowFile to describe
 * @return a mutable map of property name to stringified value
 */
static final Map<String, String> extractFlowFileProperties(final FlowFile flowFile) {
    final Map<String, String> props = new HashMap<>(8);
    props.put("flowFileId", String.valueOf(flowFile.getId()));
    props.put("fileSize", String.valueOf(flowFile.getSize()));
    props.put("entryDate", String.valueOf(flowFile.getEntryDate()));
    props.put("lineageStartDate", String.valueOf(flowFile.getLineageStartDate()));
    props.put("lastQueueDate", String.valueOf(flowFile.getLastQueueDate()));
    props.put("queueDateIndex", String.valueOf(flowFile.getQueueDateIndex()));
    return props;
}
@Override
public long getContentLength() {
    // The exact length is only known when sending a single, uncompressed
    // FlowFile without chunked transfer encoding; in every other case the
    // length cannot be predicted up front.
    if (compressionLevel == 0 && !sendAsFlowFile && !context.getProperty(CHUNKED_ENCODING).asBoolean()) {
        return toSend.get(0).getSize();
    } else {
        // -1 signals unknown length to the HTTP layer.
        return -1;
    }
}
};
/** * Determines the HiveQL statement that should be executed for the given FlowFile * * @param session the session that can be used to access the given FlowFile * @param flowFile the FlowFile whose HiveQL statement should be executed * @return the HiveQL that is associated with the given FlowFile */ protected String getHiveQL(final ProcessSession session, final FlowFile flowFile, final Charset charset) { // Read the HiveQL from the FlowFile's content final byte[] buffer = new byte[(int) flowFile.getSize()]; session.read(flowFile, in -> StreamUtils.fillBuffer(in, buffer)); // Create the PreparedStatement to use for this FlowFile. return new String(buffer, charset); }
/**
 * Extracts the content of the {@link FlowFile} as a byte array.
 *
 * @param flowFile the FlowFile whose content is read
 * @param session  session used to open the content stream
 * @return the FlowFile's content as a byte array
 */
private byte[] extractMessageBody(FlowFile flowFile, ProcessSession session) {
    // Size the buffer to the full content length (assumed < 2 GB).
    final int length = (int) flowFile.getSize();
    final byte[] body = new byte[length];
    // true: require the stream to fill the buffer completely.
    session.read(flowFile, in -> StreamUtils.fillBuffer(in, body, true));
    return body;
}
// Appends one (filename, content) entry to the sequence-file writer.
// NOTE: the int cast assumes the FlowFile's content is < 2 GB.
protected void processInputStream(InputStream stream, FlowFile flowFile, final Writer writer) throws IOException {
    int fileSize = (int) flowFile.getSize();
    final InputStreamWritable inStreamWritable = new InputStreamWritable(new BufferedInputStream(stream), fileSize);
    // The FlowFile's filename attribute serves as the record key.
    String key = flowFile.getAttribute(CoreAttributes.FILENAME.key());
    writer.append(new Text(key), inStreamWritable);
}
}
// Clones the entire FlowFile (full content: offset 0 through its size),
// after verifying the task is active and refreshing to the current record state.
@Override
public FlowFile clone(FlowFile example) {
    verifyTaskActive();
    example = validateRecordState(example);
    return clone(example, 0L, example.getSize());
}
@Override
public void process(final InputStream in) throws IOException {
    // Bundle the FlowFile's attributes, its open content stream, and its size
    // into one data packet, then encode it onto the checksummed output stream.
    final DataPacket dataPacket = new StandardDataPacket(toSend.getAttributes(), in, toSend.getSize());
    codec.encode(dataPacket, checkedOutputStream);
}
});
@Override
public void process(final InputStream in) throws IOException {
    // Bundle the FlowFile's attributes, its open content stream, and its size
    // into one data packet, then hand it to the transaction for sending.
    final DataPacket dataPacket = new StandardDataPacket(toWrap.getAttributes(), in, toWrap.getSize());
    transaction.send(dataPacket);
}
});
/**
 * Clones the byte range [offset, offset + size) of the given FlowFile into a
 * brand-new MockFlowFile registered with this session.
 *
 * @param flowFile the FlowFile to clone
 * @param offset   start of the content range, in bytes
 * @param size     number of bytes to copy
 * @return the newly created clone
 * @throws FlowFileHandlingException if the requested range extends past the content
 */
@Override
public MockFlowFile clone(FlowFile flowFile, final long offset, final long size) {
    flowFile = validateState(flowFile);
    if (offset + size > flowFile.getSize()) {
        throw new FlowFileHandlingException("Specified offset of " + offset + " and size " + size + " exceeds size of " + flowFile.toString());
    }
    // Copy the requested slice of the source content.
    final byte[] slice = Arrays.copyOfRange(((MockFlowFile) flowFile).getData(), (int) offset, (int) (offset + size));
    final MockFlowFile copy = new MockFlowFile(sharedState.nextFlowFileId(), flowFile);
    copy.setData(slice);
    // Track the clone in this session's bookkeeping.
    currentVersions.put(copy.getId(), copy);
    beingProcessed.add(copy.getId());
    return copy;
}
// Routes an oversized record to failure, annotating it with a descriptive attribute
// and logging the rejection.
// NOTE(review): 'message' is passed as the attribute KEY while the descriptive
// text is the VALUE — this ordering looks suspicious; confirm it is intentional.
protected FlowFile handleFlowFileTooBig(final ProcessSession session, FlowFile flowFileCandidate, String message) {
    flowFileCandidate = session.putAttribute(flowFileCandidate, message,
            "record too big " + flowFileCandidate.getSize() + " max allowed " + MAX_MESSAGE_SIZE );
    session.transfer(flowFileCandidate, REL_FAILURE);
    getLogger().error("Failed to publish to kinesis records {} because the size was greater than {} bytes",
            new Object[]{flowFileCandidate, MAX_MESSAGE_SIZE});
    return flowFileCandidate;
}
/**
 * Seeds this builder from a FlowFile, copying its identity (UUID), timing
 * information, attributes, and content size.
 *
 * @param flowFile the FlowFile to read from
 * @return this builder, for chaining
 */
@Override
public ProvenanceEventBuilder fromFlowFile(final FlowFile flowFile) {
    this.uuid = flowFile.getAttribute(CoreAttributes.UUID.key());
    this.contentSize = flowFile.getSize();
    setFlowFileEntryDate(flowFile.getEntryDate());
    setLineageStartDate(flowFile.getLineageStartDate());
    // No previous attributes for a fresh snapshot, so the "before" map is empty.
    setAttributes(Collections.emptyMap(), flowFile.getAttributes());
    return this;
}