/**
 * Returns the Content Claim of the current FlowFile record, or {@code null}
 * if there is no current record to read a claim from.
 */
@Override
public ContentClaim getCurrentClaim() {
    final FlowFileRecord currentRecord = getCurrent();
    if (currentRecord == null) {
        return null;
    }
    return currentRecord.getContentClaim();
}
/**
 * Clones {@code example}, producing a new FlowFile whose content is the byte range
 * {@code [offset, offset + size)} of the original's content claim. Attributes are
 * copied from the original, a fresh UUID is assigned, and the claimant count on the
 * shared content claim is incremented so the underlying content is not reclaimed
 * while the clone references it. A CLONE provenance event is emitted when the full
 * content range is cloned; otherwise a FORK event is registered.
 *
 * @param example the FlowFile to clone (revalidated against this session)
 * @param offset  starting byte offset into the original's content
 * @param size    number of bytes of content the clone should reference
 * @return the newly created FlowFile record
 * @throws FlowFileHandlingException if the requested range is negative or extends
 *         past the original FlowFile's content size
 */
@Override
public FlowFile clone(FlowFile example, final long offset, final long size) {
    verifyTaskActive();
    example = validateRecordState(example);
    final StandardRepositoryRecord exampleRepoRecord = getRecord(example);
    final FlowFileRecord currRec = exampleRepoRecord.getCurrent();
    final ContentClaim claim = exampleRepoRecord.getCurrentClaim();

    // Range check written to avoid long overflow: the naive `offset + size > getSize()`
    // can wrap negative for very large operands and incorrectly pass. Explicit negative
    // checks also reject nonsensical ranges that previously slipped through.
    if (offset < 0 || size < 0 || offset > example.getSize() - size) {
        throw new FlowFileHandlingException("Specified offset of " + offset + " and size " + size + " exceeds size of " + example.toString());
    }

    final StandardFlowFileRecord.Builder builder = new StandardFlowFileRecord.Builder().fromFlowFile(currRec);
    builder.id(context.getNextFlowFileSequence());
    // The clone's claim offset is relative to the original's position within the claim.
    builder.contentClaimOffset(currRec.getContentClaimOffset() + offset);
    builder.size(size);

    final String newUuid = UUID.randomUUID().toString();
    builder.addAttribute(CoreAttributes.UUID.key(), newUuid);

    final FlowFileRecord clone = builder.build();
    if (claim != null) {
        // Keep the shared content alive for as long as the clone references it.
        context.getContentRepository().incrementClaimaintCount(claim);
    }

    final StandardRepositoryRecord record = new StandardRepositoryRecord(null);
    record.setWorking(clone, clone.getAttributes());
    records.put(clone.getId(), record);

    // A full-range clone is reported as CLONE; any sub-range is reported as a FORK.
    if (offset == 0L && size == example.getSize()) {
        provenanceReporter.clone(example, clone);
    } else {
        registerForkEvent(example, clone);
    }
    return clone;
}
if (repoRecord.getCurrent() != null && repoRecord.getCurrentClaim() != null) { final ContentClaim currentClaim = repoRecord.getCurrentClaim(); final long currentOffset = repoRecord.getCurrentClaimOffset();
/**
 * Returns the byte offset into the current record's Content Claim, or {@code 0}
 * when there is no current record.
 */
@Override
public long getCurrentClaimOffset() {
    final FlowFileRecord currentRecord = getCurrent();
    if (currentRecord == null) {
        return 0L;
    }
    return currentRecord.getContentClaimOffset();
}
private void setType(final RepositoryRecordType newType) { if (newType == this.type) { return; } if (this.type == RepositoryRecordType.CREATE) { // Because we don't copy updated attributes to `this.updatedAttributes` for CREATE records, we need to ensure // that if a record is changed from CREATE to anything else that we do properly update the `this.updatedAttributes` field. this.updatedAttributes = new HashMap<>(getCurrent().getAttributes()); } this.type = newType; }
/**
 * Returns the attributes that have been updated on this record. For CREATE records
 * every attribute counts as updated, so the current record's full attribute map is
 * returned; otherwise the tracked update map (empty if none).
 */
Map<String, String> getUpdatedAttributes() {
    if (type == RepositoryRecordType.CREATE) {
        return getCurrent().getAttributes();
    }
    if (updatedAttributes == null) {
        return Collections.emptyMap();
    }
    return updatedAttributes;
}
/** Human-readable description of this record: its update type and current FlowFile. */
@Override
public String toString() {
    final StringBuilder sb = new StringBuilder("StandardRepositoryRecord[");
    sb.append("UpdateType=").append(getType());
    sb.append(",Record=").append(getCurrent());
    sb.append(']');
    return sb.toString();
}
/**
 * Resolves the most recent version of the given FlowFile known to this session;
 * falls back to the passed-in FlowFile when the session has no record for it.
 */
private FlowFile getMostRecent(final FlowFile flowFile) {
    final StandardRepositoryRecord sessionRecord = getRecord(flowFile);
    if (sessionRecord == null) {
        return flowFile;
    }
    return sessionRecord.getCurrent();
}
/**
 * Verifies that the record has a resolvable FlowFile Queue; if the queue id is
 * missing/blank or does not map to a known queue, the record is marked for abort
 * (and will be discarded) after logging a warning.
 */
private void requireFlowFileQueue(final StandardRepositoryRecord repoRecord, final String queueId) {
    final boolean queueIdBlank = (queueId == null) || queueId.trim().isEmpty();
    if (queueIdBlank) {
        logger.warn("{} does not have a Queue associated with it; this record will be discarded", repoRecord.getCurrent());
        repoRecord.markForAbort();
        return;
    }
    if (repoRecord.getOriginalQueue() == null) {
        logger.warn("{} maps to unknown Queue {}; this record will be discarded", repoRecord.getCurrent(), queueId);
        repoRecord.markForAbort();
    }
}
/**
 * Stamps the record's working FlowFile with the given last-queued time and the next
 * monotonically increasing enqueue index.
 */
private void updateLastQueuedDate(final StandardRepositoryRecord record, final Long lastQueueDate) {
    final StandardFlowFileRecord.Builder builder = new StandardFlowFileRecord.Builder();
    builder.fromFlowFile(record.getCurrent());
    builder.lastQueued(lastQueueDate, enqueuedIndex.getAndIncrement());
    record.setWorking(builder.build());
}
/**
 * Removes from the FlowFile every attribute whose key matches the given pattern.
 * The uuid attribute is never removed. A {@code null} pattern results in a working
 * copy with no attribute-change bookkeeping.
 */
@Override
public FlowFile removeAllAttributes(FlowFile flowFile, final Pattern keyPattern) {
    verifyTaskActive();
    flowFile = validateRecordState(flowFile);
    final StandardRepositoryRecord record = getRecord(flowFile);
    final FlowFileRecord updatedFlowFile = new StandardFlowFileRecord.Builder()
        .fromFlowFile(record.getCurrent())
        .removeAttributes(keyPattern)
        .build();

    if (keyPattern == null) {
        record.setWorking(updatedFlowFile);
        return updatedFlowFile;
    }

    // Track which attributes were removed (value null) so the repository can
    // persist the delta; the immutable uuid attribute is always skipped.
    final Map<String, String> removals = new HashMap<>();
    for (final String attributeName : record.getCurrent().getAttributes().keySet()) {
        if (CoreAttributes.UUID.key().equals(attributeName)) {
            continue;
        }
        if (keyPattern.matcher(attributeName).matches()) {
            removals.put(attributeName, null);
        }
    }
    record.setWorking(updatedFlowFile, removals);
    return updatedFlowFile;
}
/**
 * Removes the given attribute keys from the FlowFile. The uuid attribute is never
 * removed. A {@code null} key set is a no-op that returns the FlowFile unchanged.
 */
@Override
public FlowFile removeAllAttributes(FlowFile flowFile, final Set<String> keys) {
    verifyTaskActive();
    flowFile = validateRecordState(flowFile);
    if (keys == null) {
        return flowFile;
    }

    final StandardRepositoryRecord record = getRecord(flowFile);
    final FlowFileRecord updatedFlowFile = new StandardFlowFileRecord.Builder()
        .fromFlowFile(record.getCurrent())
        .removeAttributes(keys)
        .build();

    // Record each removal as a null-valued entry, skipping the immutable uuid key.
    final Map<String, String> removals = new HashMap<>();
    for (final String key : keys) {
        if (!CoreAttributes.UUID.key().equals(key)) {
            removals.put(key, null);
        }
    }
    record.setWorking(updatedFlowFile, removals);
    return updatedFlowFile;
}
/**
 * Validates that the given FlowFile may be operated on by this session and returns
 * its most recent record. Rejects FlowFiles that are currently inside a read/write
 * callback, unknown to this session, already marked for transfer, or marked for
 * removal. Failing the session-level checks triggers a rollback before throwing.
 *
 * @param allowRecursiveRead when true, an active read on the FlowFile is permitted
 */
private FlowFile validateRecordState(final FlowFile flowFile, final boolean allowRecursiveRead) {
    if (!allowRecursiveRead && readRecursionSet.containsKey(flowFile)) {
        throw new IllegalStateException(flowFile + " already in use for an active callback or an InputStream created by ProcessSession.read(FlowFile) has not been closed");
    }
    if (writeRecursionSet.contains(flowFile)) {
        throw new IllegalStateException(flowFile + " already in use for an active callback or an OutputStream created by ProcessSession.write(FlowFile) has not been closed");
    }

    final StandardRepositoryRecord record = getRecord(flowFile);
    if (record == null) {
        rollback();
        throw new FlowFileHandlingException(flowFile + " is not known in this session (" + toString() + ")");
    }
    if (record.getTransferRelationship() != null) {
        rollback();
        throw new FlowFileHandlingException(flowFile + " is already marked for transfer");
    }
    if (record.isMarkedForDelete()) {
        rollback();
        throw new FlowFileHandlingException(flowFile + " has already been marked for removal");
    }
    return record.getCurrent();
}
/**
 * Adds/overwrites the given attributes on the FlowFile. The uuid attribute is
 * immutable and is silently stripped from the supplied map if present.
 */
@Override
public FlowFile putAllAttributes(FlowFile flowFile, final Map<String, String> attributes) {
    verifyTaskActive();
    flowFile = validateRecordState(flowFile);
    final StandardRepositoryRecord record = getRecord(flowFile);

    // Copy the map only if we actually have to drop the uuid key.
    Map<String, String> updatedAttributes = attributes;
    if (attributes.containsKey(CoreAttributes.UUID.key())) {
        updatedAttributes = new HashMap<>(attributes);
        updatedAttributes.remove(CoreAttributes.UUID.key());
    }

    final FlowFileRecord newFile = new StandardFlowFileRecord.Builder()
        .fromFlowFile(record.getCurrent())
        .addAttributes(updatedAttributes)
        .build();
    record.setWorking(newFile, updatedAttributes);
    return newFile;
}
@Override
public void close() throws IOException {
    // Idempotent: only the first close() performs the bookkeeping below.
    if (closed) {
        return;
    }
    closed = true;

    // The FlowFile is no longer inside an active write callback.
    writeRecursionSet.remove(sourceFlowFile);

    // Fold this stream's byte count into the session-wide tally.
    final long bytesWritten = countingOut.getBytesWritten();
    StandardProcessSession.this.bytesWritten += bytesWritten;

    final OutputStream removed = openOutputStreams.remove(sourceFlowFile);
    if (removed == null) {
        // Defensive logging only: the map is expected to contain this stream.
        LOG.error("Closed Session's OutputStream but there was no entry for it in the map; sourceFlowFile={}; map={}", sourceFlowFile, openOutputStreams);
    }

    flush();
    removeTemporaryClaim(record);

    // Rebuild the working FlowFile to point at the written content. The offset is
    // computed as claim length minus bytes written, clamped at 0 — presumably because
    // the claim may contain earlier content ahead of this write; verify against the
    // claim-allocation code elsewhere in this class.
    final FlowFileRecord newFile = new StandardFlowFileRecord.Builder()
        .fromFlowFile(record.getCurrent())
        .contentClaim(updatedClaim)
        .contentClaimOffset(Math.max(0, updatedClaim.getLength() - bytesWritten))
        .size(bytesWritten)
        .build();
    record.setWorking(newFile);
}
};
/**
 * Sets a single attribute on the FlowFile. The uuid attribute is immutable, so an
 * attempt to set it is silently ignored and the FlowFile returned unchanged.
 */
@Override
public FlowFile putAttribute(FlowFile flowFile, final String key, final String value) {
    verifyTaskActive();
    flowFile = validateRecordState(flowFile);

    if (CoreAttributes.UUID.key().equals(key)) {
        return flowFile;
    }

    final StandardRepositoryRecord record = getRecord(flowFile);
    final StandardFlowFileRecord.Builder builder = new StandardFlowFileRecord.Builder();
    builder.fromFlowFile(record.getCurrent());
    builder.addAttribute(key, value);
    final FlowFileRecord updated = builder.build();
    record.setWorking(updated, key, value);
    return updated;
}
/**
 * Removes a single attribute from the FlowFile. The uuid attribute is immutable,
 * so an attempt to remove it is silently ignored and the FlowFile returned unchanged.
 */
@Override
public FlowFile removeAttribute(FlowFile flowFile, final String key) {
    verifyTaskActive();
    flowFile = validateRecordState(flowFile);

    if (CoreAttributes.UUID.key().equals(key)) {
        return flowFile;
    }

    final StandardRepositoryRecord record = getRecord(flowFile);
    final StandardFlowFileRecord.Builder builder = new StandardFlowFileRecord.Builder();
    builder.fromFlowFile(record.getCurrent());
    builder.removeAttributes(key);
    final FlowFileRecord updated = builder.build();
    // A null value in the update map marks the attribute as removed.
    record.setWorking(updated, key, null);
    return updated;
}
/**
 * Penalizes the FlowFile: stamps it with a penalty-expiration timestamp computed
 * from the owning component's configured penalization period.
 */
@Override
public FlowFile penalize(FlowFile flowFile) {
    verifyTaskActive();
    flowFile = validateRecordState(flowFile);
    final StandardRepositoryRecord record = getRecord(flowFile);

    final long penaltyMillis = context.getConnectable().getPenalizationPeriod(TimeUnit.MILLISECONDS);
    final long expirationEpochMillis = System.currentTimeMillis() + penaltyMillis;

    final FlowFileRecord penalized = new StandardFlowFileRecord.Builder()
        .fromFlowFile(record.getCurrent())
        .penaltyExpirationTime(expirationEpochMillis)
        .build();
    record.setWorking(penalized);
    return penalized;
}
.fromFlowFile(record.getCurrent()) .contentClaim(newClaim) .contentClaimOffset(claimOffset)
/**
 * Handles a missing-content condition for the suspect record: emits a DROP
 * provenance event, and if the missing claim is the record's original or working
 * claim, rolls the session back and rethrows as a MissingFlowFileException. Only a
 * missing original claim additionally marks the record for abort.
 */
private void handleContentNotFound(final ContentNotFoundException nfe, final StandardRepositoryRecord suspectRecord) {
    final ContentClaim registeredClaim = suspectRecord.getOriginalClaim();
    final ContentClaim transientClaim = suspectRecord.getWorkingClaim();
    final ContentClaim missingClaim = nfe.getMissingClaim();

    final String dropReason = (nfe.getMessage() == null) ? "Content Not Found" : nfe.getMessage();
    final ProvenanceEventRecord dropEvent = provenanceReporter.drop(suspectRecord.getCurrent(), dropReason);
    if (dropEvent != null) {
        context.getProvenanceRepository().registerEvent(dropEvent);
    }

    if (missingClaim == registeredClaim) {
        suspectRecord.markForAbort();
        rollback();
        throw new MissingFlowFileException("Unable to find content for FlowFile", nfe);
    }
    if (missingClaim == transientClaim) {
        rollback();
        throw new MissingFlowFileException("Unable to find content for FlowFile", nfe);
    }
}