/**
 * Stamps the record's current FlowFile with the given last-queued timestamp and a
 * monotonically increasing enqueue index, then installs the rebuilt FlowFile as the
 * record's working copy.
 */
private void updateLastQueuedDate(final StandardRepositoryRecord record, final Long lastQueueDate) {
    final StandardFlowFileRecord.Builder builder = new StandardFlowFileRecord.Builder()
            .fromFlowFile(record.getCurrent())
            .lastQueued(lastQueueDate, enqueuedIndex.getAndIncrement());
    record.setWorking(builder.build());
}
/**
 * Removes every attribute whose key matches the given pattern (the UUID attribute is
 * never removed) and returns the updated FlowFile. A {@code null} pattern removes
 * nothing and reports no attribute delta.
 */
@Override
public FlowFile removeAllAttributes(FlowFile flowFile, final Pattern keyPattern) {
    verifyTaskActive();
    flowFile = validateRecordState(flowFile);

    final StandardRepositoryRecord record = getRecord(flowFile);
    final FlowFileRecord updated = new StandardFlowFileRecord.Builder()
            .fromFlowFile(record.getCurrent())
            .removeAttributes(keyPattern)
            .build();

    if (keyPattern == null) {
        // Nothing matched, so there is no removed-key delta to report.
        record.setWorking(updated);
        return updated;
    }

    // Report each removed key with a null value so the repository sees the delta;
    // UUID is excluded because it can never be removed.
    final Map<String, String> removedKeys = new HashMap<>();
    for (final String attrName : record.getCurrent().getAttributes().keySet()) {
        if (!CoreAttributes.UUID.key().equals(attrName) && keyPattern.matcher(attrName).matches()) {
            removedKeys.put(attrName, null);
        }
    }
    record.setWorking(updated, removedKeys);
    return updated;
}
/**
 * Creates a brand-new FlowFile seeded with the core attributes (filename defaults to
 * the generated UUID), registers it with this session, and returns it.
 */
@Override
public FlowFile create() {
    verifyTaskActive();

    final String uuid = UUID.randomUUID().toString();
    final Map<String, String> initialAttributes = new HashMap<>();
    initialAttributes.put(CoreAttributes.FILENAME.key(), uuid);
    initialAttributes.put(CoreAttributes.PATH.key(), DEFAULT_FLOWFILE_PATH);
    initialAttributes.put(CoreAttributes.UUID.key(), uuid);

    final FlowFileRecord flowFile = new StandardFlowFileRecord.Builder()
            .id(context.getNextFlowFileSequence())
            .addAttributes(initialAttributes)
            .build();

    // A newly created FlowFile has no originating queue, hence the null argument.
    final StandardRepositoryRecord repoRecord = new StandardRepositoryRecord(null);
    repoRecord.setWorking(flowFile, initialAttributes);
    records.put(flowFile.getId(), repoRecord);
    createdFlowFiles.add(flowFile.getAttribute(CoreAttributes.UUID.key()));
    return flowFile;
}
/**
 * Removes the given attribute keys from the FlowFile (the UUID attribute is never
 * removed) and returns the updated FlowFile. A {@code null} key set is a no-op.
 */
@Override
public FlowFile removeAllAttributes(FlowFile flowFile, final Set<String> keys) {
    verifyTaskActive();
    flowFile = validateRecordState(flowFile);
    if (keys == null) {
        return flowFile;
    }

    final StandardRepositoryRecord record = getRecord(flowFile);
    final FlowFileRecord updated = new StandardFlowFileRecord.Builder()
            .fromFlowFile(record.getCurrent())
            .removeAttributes(keys)
            .build();

    // Each removed key is reported with a null value; UUID is excluded because it
    // can never be removed.
    final Map<String, String> removedKeys = new HashMap<>();
    for (final String key : keys) {
        if (!CoreAttributes.UUID.key().equals(key)) {
            removedKeys.put(key, null);
        }
    }
    record.setWorking(updated, removedKeys);
    return updated;
}
record.setWorking(fFile, newAttributes); records.put(fFile.getId(), record); createdFlowFiles.add(fFile.getAttribute(CoreAttributes.UUID.key()));
/**
 * Adds all of the given attributes to the FlowFile and returns the updated FlowFile.
 * The UUID attribute is immutable: if the caller's map contains it, that entry is
 * silently dropped (the caller's map itself is never modified).
 */
@Override
public FlowFile putAllAttributes(FlowFile flowFile, final Map<String, String> attributes) {
    verifyTaskActive();
    flowFile = validateRecordState(flowFile);
    final StandardRepositoryRecord record = getRecord(flowFile);

    // Strip the immutable UUID key, copying only when we actually have to mutate.
    Map<String, String> toApply = attributes;
    if (attributes.containsKey(CoreAttributes.UUID.key())) {
        toApply = new HashMap<>(attributes);
        toApply.remove(CoreAttributes.UUID.key());
    }

    final FlowFileRecord updated = new StandardFlowFileRecord.Builder()
            .fromFlowFile(record.getCurrent())
            .addAttributes(toApply)
            .build();
    record.setWorking(updated, toApply);
    return updated;
}
// close() of an anonymous OutputStream wrapper returned by the session
// (class header not visible in this excerpt — presumably wraps a content-claim
// write stream; confirm against the enclosing method).
@Override
public void close() throws IOException {
    // Idempotent: only the first close performs the bookkeeping below.
    if (closed) {
        return;
    }
    closed = true;

    // This FlowFile is no longer being written to via this stream.
    writeRecursionSet.remove(sourceFlowFile);

    // Fold the bytes written through this stream into the session-wide total.
    final long bytesWritten = countingOut.getBytesWritten();
    StandardProcessSession.this.bytesWritten += bytesWritten;

    final OutputStream removed = openOutputStreams.remove(sourceFlowFile);
    if (removed == null) {
        LOG.error("Closed Session's OutputStream but there was no entry for it in the map; sourceFlowFile={}; map={}", sourceFlowFile, openOutputStreams);
    }

    flush();
    // The claim previously held by the working record is superseded by the
    // freshly written one.
    removeTemporaryClaim(record);

    // Re-point the FlowFile at the new content. The offset math implies the
    // content was appended at the end of updatedClaim (offset = claim length
    // minus bytes written, clamped at 0) — NOTE(review): confirm the claim is
    // append-shared before relying on this.
    final FlowFileRecord newFile = new StandardFlowFileRecord.Builder()
        .fromFlowFile(record.getCurrent())
        .contentClaim(updatedClaim)
        .contentClaimOffset(Math.max(0, updatedClaim.getLength() - bytesWritten))
        .size(bytesWritten)
        .build();
    record.setWorking(newFile);
} }; // end of anonymous class
/**
 * Removes a single attribute from the FlowFile and returns the updated FlowFile.
 * The UUID attribute is immutable; attempting to remove it returns the FlowFile
 * unchanged.
 */
@Override
public FlowFile removeAttribute(FlowFile flowFile, final String key) {
    verifyTaskActive();
    flowFile = validateRecordState(flowFile);

    if (CoreAttributes.UUID.key().equals(key)) {
        return flowFile;
    }

    final StandardRepositoryRecord record = getRecord(flowFile);
    final FlowFileRecord updated = new StandardFlowFileRecord.Builder()
            .fromFlowFile(record.getCurrent())
            .removeAttributes(key)
            .build();
    // A null value in the delta marks the key as removed.
    record.setWorking(updated, key, null);
    return updated;
}
/**
 * Sets a single attribute on the FlowFile and returns the updated FlowFile.
 * The UUID attribute is immutable; attempting to set it returns the FlowFile
 * unchanged.
 */
@Override
public FlowFile putAttribute(FlowFile flowFile, final String key, final String value) {
    verifyTaskActive();
    flowFile = validateRecordState(flowFile);

    if (CoreAttributes.UUID.key().equals(key)) {
        return flowFile;
    }

    final StandardRepositoryRecord record = getRecord(flowFile);
    final FlowFileRecord updated = new StandardFlowFileRecord.Builder()
            .fromFlowFile(record.getCurrent())
            .addAttribute(key, value)
            .build();
    record.setWorking(updated, key, value);
    return updated;
}
record.setWorking(fFile, newAttributes); records.put(fFile.getId(), record); createdFlowFiles.add(fFile.getAttribute(CoreAttributes.UUID.key()));
.addAttribute(CoreAttributes.FILENAME.key(), source.toFile().getName()) .build(); record.setWorking(newFile, CoreAttributes.FILENAME.key(), source.toFile().getName());
/**
 * Penalizes the FlowFile for the component's configured penalization period,
 * measured from now, and returns the updated FlowFile.
 */
@Override
public FlowFile penalize(FlowFile flowFile) {
    verifyTaskActive();
    flowFile = validateRecordState(flowFile);
    final StandardRepositoryRecord record = getRecord(flowFile);

    final long penaltyMillis = context.getConnectable().getPenalizationPeriod(TimeUnit.MILLISECONDS);
    final long expirationEpochMillis = System.currentTimeMillis() + penaltyMillis;

    final FlowFileRecord updated = new StandardFlowFileRecord.Builder()
            .fromFlowFile(record.getCurrent())
            .penaltyExpirationTime(expirationEpochMillis)
            .build();
    record.setWorking(updated);
    return updated;
}
context.getContentRepository().incrementClaimaintCount(claim); newRecord.setWorking(clone, Collections.<String, String> emptyMap());
destinationRecord.setWorking(newFile); return newFile;
/**
 * Clones the given FlowFile, restricting the clone's content to {@code size} bytes
 * starting at {@code offset} within the example's content. The clone gets a fresh
 * UUID and shares the example's content claim (claimant count is incremented).
 * A full-content clone emits a CLONE provenance event; a partial clone emits a FORK.
 *
 * @throws FlowFileHandlingException if the requested range is negative or extends
 *         past the end of the example's content
 */
@Override
public FlowFile clone(FlowFile example, final long offset, final long size) {
    verifyTaskActive();
    example = validateRecordState(example);
    final StandardRepositoryRecord exampleRepoRecord = getRecord(example);
    final FlowFileRecord currRec = exampleRepoRecord.getCurrent();
    final ContentClaim claim = exampleRepoRecord.getCurrentClaim();

    // Reject negative ranges explicitly, and use the subtraction form of the bounds
    // check: the original `offset + size > getSize()` could overflow long and let an
    // out-of-range request slip through as a corrupt clone.
    if (offset < 0 || size < 0 || size > example.getSize() - offset) {
        throw new FlowFileHandlingException("Specified offset of " + offset + " and size " + size + " exceeds size of " + example.toString());
    }

    final StandardFlowFileRecord.Builder builder = new StandardFlowFileRecord.Builder().fromFlowFile(currRec);
    builder.id(context.getNextFlowFileSequence());
    builder.contentClaimOffset(currRec.getContentClaimOffset() + offset);
    builder.size(size);

    // The clone must have its own identity.
    final String newUuid = UUID.randomUUID().toString();
    builder.addAttribute(CoreAttributes.UUID.key(), newUuid);

    final FlowFileRecord clone = builder.build();
    if (claim != null) {
        // The clone shares the example's content claim; bump its reference count so
        // the content is not reclaimed while the clone still needs it.
        context.getContentRepository().incrementClaimaintCount(claim);
    }

    final StandardRepositoryRecord record = new StandardRepositoryRecord(null);
    record.setWorking(clone, clone.getAttributes());
    records.put(clone.getId(), record);

    if (offset == 0L && size == example.getSize()) {
        // Identical content -> CLONE provenance event.
        provenanceReporter.clone(example, clone);
    } else {
        // Sub-range of the content -> FORK provenance event.
        registerForkEvent(example, clone);
    }

    return clone;
}
.size(newSize) .build(); record.setWorking(newFile); return newFile;
record.setWorking(newFile); return newFile;
.build(); record.setWorking(newFile); return newFile;
record.setWorking(flowFileRecord); record.setDestination(queue); flowFileRepository.updateRepository(Collections.singleton(record));
.build(); record.setWorking(newFile);