/**
 * Creates a repository record for a FlowFile that has been swapped out of memory.
 *
 * @param originalQueue the queue the FlowFile was swapped out from
 * @param originalFlowFileRecord the FlowFile that was swapped out; may be {@code null}
 * @param swapLocation the location that the FlowFile was swapped out to
 */
public StandardRepositoryRecord(final FlowFileQueue originalQueue, final FlowFileRecord originalFlowFileRecord, final String swapLocation) {
    this.originalQueue = originalQueue;
    this.originalFlowFileRecord = originalFlowFileRecord;
    setType(RepositoryRecordType.SWAP_OUT);
    this.swapLocation = swapLocation;

    // Snapshot the attributes at swap-out time; a null record has no attributes.
    if (originalFlowFileRecord == null) {
        this.originalAttributes = Collections.emptyMap();
    } else {
        this.originalAttributes = originalFlowFileRecord.getAttributes();
    }
}
private void setType(final RepositoryRecordType newType) { if (newType == this.type) { return; } if (this.type == RepositoryRecordType.CREATE) { // Because we don't copy updated attributes to `this.updatedAttributes` for CREATE records, we need to ensure // that if a record is changed from CREATE to anything else that we do properly update the `this.updatedAttributes` field. this.updatedAttributes = new HashMap<>(getCurrent().getAttributes()); } this.type = newType; }
/**
 * Returns the attributes that have been updated on this record. For CREATE records
 * every current attribute counts as updated; otherwise the tracked delta is returned
 * (or an empty map if nothing has been tracked).
 *
 * @return the updated attributes; never {@code null}
 */
Map<String, String> getUpdatedAttributes() {
    if (type == RepositoryRecordType.CREATE) {
        return getCurrent().getAttributes();
    }
    if (updatedAttributes == null) {
        return Collections.emptyMap();
    }
    return updatedAttributes;
}
/**
 * Serializes the given FlowFile to the destination stream: attribute count, then each
 * key/value pair, then the lineage start date and entry date.
 *
 * @param flowFile the FlowFile to serialize
 * @param destination the stream to write to
 * @throws IOException if writing to the stream fails
 */
@Override
public void encode(final FlowFileRecord flowFile, final OutputStream destination) throws IOException {
    final DataOutputStream out = new DataOutputStream(destination);

    final Map<String, String> attributes = flowFile.getAttributes();
    out.writeInt(attributes.size());
    for (final Map.Entry<String, String> attribute : attributes.entrySet()) {
        writeString(attribute.getKey(), out);
        writeString(attribute.getValue(), out);
    }

    out.writeLong(flowFile.getLineageStartDate());
    out.writeLong(flowFile.getEntryDate());
}
@Override public FlowFileRecord getFlowFile(String id, String flowFileUuid) { try { final Connection connection = locateConnection(id); final FlowFileQueue queue = connection.getFlowFileQueue(); final FlowFileRecord flowFile = queue.getFlowFile(flowFileUuid); if (flowFile == null) { throw new ResourceNotFoundException(String.format("The FlowFile with UUID %s is no longer in the active queue.", flowFileUuid)); } // get the attributes and ensure appropriate access final Map<String, String> attributes = flowFile.getAttributes(); final Authorizable dataAuthorizable = new DataAuthorizable(connection.getSourceAuthorizable()); dataAuthorizable.authorize(authorizer, RequestAction.READ, NiFiUserUtils.getNiFiUser(), attributes); return flowFile; } catch (final IOException ioe) { logger.error(String.format("Unable to get the flowfile (%s) at this time.", flowFileUuid), ioe); throw new IllegalStateException("Unable to get the FlowFile at this time."); } }
final Map<String, String> attributes = flowFile.getAttributes(); out.writeInt(attributes.size()); for (final Map.Entry<String, String> entry : attributes.entrySet()) {
/**
 * Removes all attributes whose keys match the given pattern from the FlowFile.
 * The {@code uuid} attribute is never removed. A {@code null} pattern removes nothing.
 *
 * @param flowFile the FlowFile to update
 * @param keyPattern pattern identifying which attribute keys to remove; may be {@code null}
 * @return the updated FlowFile
 */
@Override
public FlowFile removeAllAttributes(FlowFile flowFile, final Pattern keyPattern) {
    verifyTaskActive();

    flowFile = validateRecordState(flowFile);
    final StandardRepositoryRecord record = getRecord(flowFile);

    final FlowFileRecord updatedFlowFile = new StandardFlowFileRecord.Builder()
        .fromFlowFile(record.getCurrent())
        .removeAttributes(keyPattern)
        .build();

    if (keyPattern != null) {
        // Record which keys were removed (mapped to null) so the repository record tracks the delta.
        final Map<String, String> removed = new HashMap<>();
        for (final String key : record.getCurrent().getAttributes().keySet()) {
            // The uuid attribute is protected and never removed, even if the pattern matches it.
            if (!CoreAttributes.UUID.key().equals(key) && keyPattern.matcher(key).matches()) {
                removed.put(key, null);
            }
        }
        record.setWorking(updatedFlowFile, removed);
    } else {
        record.setWorking(updatedFlowFile);
    }

    return updatedFlowFile;
}
final Map<String, String> attributes = flowFile.getAttributes(); final Authorizable dataAuthorizable = new DataAuthorizable(connection.getSourceAuthorizable()); dataAuthorizable.authorize(authorizer, RequestAction.READ, user, attributes);
/**
 * Resolves a FlowFile schema field name to the corresponding value of the wrapped FlowFile.
 *
 * @param fieldName the schema field name to look up
 * @return the field's value, or {@code null} if the field name is not recognized
 */
@Override
public Object getFieldValue(final String fieldName) {
    switch (fieldName) {
        case FlowFileSchema.RECORD_ID:
            return flowFile.getId();
        case FlowFileSchema.ATTRIBUTES:
            return flowFile.getAttributes();
        case FlowFileSchema.CONTENT_CLAIM:
            return contentClaim;
        case FlowFileSchema.ENTRY_DATE:
            return flowFile.getEntryDate();
        case FlowFileSchema.FLOWFILE_SIZE:
            return flowFile.getSize();
        case FlowFileSchema.LINEAGE_START_DATE:
            return flowFile.getLineageStartDate();
        case FlowFileSchema.LINEAGE_START_INDEX:
            return flowFile.getLineageStartIndex();
        case FlowFileSchema.QUEUE_DATE:
            return flowFile.getLastQueueDate();
        case FlowFileSchema.QUEUE_DATE_INDEX:
            return flowFile.getQueueDateIndex();
        default:
            // Unknown field names resolve to null rather than throwing.
            return null;
    }
}
/**
 * Initializes this builder from an existing FlowFile record, copying its identifiers,
 * timestamps, size, attributes, and content claim details. A {@code null} argument
 * leaves the builder unchanged.
 *
 * @param specFlowFile the FlowFile to copy from; may be {@code null}
 * @return this builder, for chaining
 */
public Builder fromFlowFile(final FlowFileRecord specFlowFile) {
    if (specFlowFile == null) {
        return this;
    }
    bId = specFlowFile.getId();
    bEntryDate = specFlowFile.getEntryDate();
    bLineageStartDate = specFlowFile.getLineageStartDate();
    bLineageStartIndex = specFlowFile.getLineageStartIndex();
    bLineageIdentifiers.clear();
    bPenaltyExpirationMs = specFlowFile.getPenaltyExpirationMillis();
    bSize = specFlowFile.getSize();
    // If this is a StandardFlowFileRecord, access the attributes map directly. Do not use the
    // getAttributes() method, because that will wrap the original in an UnmodifiableMap. As a result,
    // a Processor that continually calls session.append() for instance will have a FlowFile whose attributes
    // Map is wrapped thousands of times until it hits a StackOverflowError. We want the getter to return
    // UnmodifiableMap, though, so that Processors cannot directly modify that Map.
    bAttributes = specFlowFile instanceof StandardFlowFileRecord ? ((StandardFlowFileRecord) specFlowFile).attributes : specFlowFile.getAttributes();
    // Mark the attributes as shared (not yet copied) so a copy-on-write happens only if this builder mutates them.
    bAttributesCopied = false;
    bClaim = specFlowFile.getContentClaim();
    bClaimOffset = specFlowFile.getContentClaimOffset();
    bLastQueueDate = specFlowFile.getLastQueueDate();
    bQueueDateIndex = specFlowFile.getQueueDateIndex();
    return this;
}
/**
 * Creates a clone of the given FlowFile whose content is the byte range
 * [offset, offset + size) of the example's content. The clone shares the example's
 * content claim rather than copying bytes.
 *
 * @param example the FlowFile to clone
 * @param offset the offset into the example's content at which the clone's content begins
 * @param size the number of bytes of content for the clone
 * @return the newly created FlowFile
 * @throws FlowFileHandlingException if the requested range extends past the example's content
 */
@Override
public FlowFile clone(FlowFile example, final long offset, final long size) {
    verifyTaskActive();
    example = validateRecordState(example);
    final StandardRepositoryRecord exampleRepoRecord = getRecord(example);
    final FlowFileRecord currRec = exampleRepoRecord.getCurrent();
    final ContentClaim claim = exampleRepoRecord.getCurrentClaim();
    // The requested range must lie entirely within the example's content.
    if (offset + size > example.getSize()) {
        throw new FlowFileHandlingException("Specified offset of " + offset + " and size " + size + " exceeds size of " + example.toString());
    }
    final StandardFlowFileRecord.Builder builder = new StandardFlowFileRecord.Builder().fromFlowFile(currRec);
    builder.id(context.getNextFlowFileSequence());
    builder.contentClaimOffset(currRec.getContentClaimOffset() + offset);
    builder.size(size);
    // Every clone must get its own UUID; all other attributes are inherited from the example.
    final String newUuid = UUID.randomUUID().toString();
    builder.addAttribute(CoreAttributes.UUID.key(), newUuid);
    final FlowFileRecord clone = builder.build();
    if (claim != null) {
        // The clone references the same content claim, so increment the claimant count to keep
        // the content repository from reclaiming it while the clone is alive.
        context.getContentRepository().incrementClaimaintCount(claim);
    }
    final StandardRepositoryRecord record = new StandardRepositoryRecord(null);
    record.setWorking(clone, clone.getAttributes());
    records.put(clone.getId(), record);
    // Full-content clones are reported as CLONE events; partial ranges are reported as FORK.
    if (offset == 0L && size == example.getSize()) {
        provenanceReporter.clone(example, clone);
    } else {
        registerForkEvent(example, clone);
    }
    return clone;
}
.setEventType(ProvenanceEventType.DOWNLOAD) .setFlowFileUUID(flowFile.getAttribute(CoreAttributes.UUID.key())) .setAttributes(flowFile.getAttributes(), Collections.emptyMap()) .setTransitUri(requestUri) .setEventTime(System.currentTimeMillis())
/**
 * Builds a DROP provenance event for a FlowFile that is removed when this queue is emptied.
 *
 * @param flowFile the FlowFile being dropped
 * @param requestor the identity of the user who requested that the queue be emptied
 * @return the constructed provenance event
 */
private ProvenanceEventRecord createDropProvenanceEvent(final FlowFileRecord flowFile, final String requestor) {
    final ProvenanceEventBuilder eventBuilder = provRepository.eventBuilder();
    eventBuilder.fromFlowFile(flowFile);
    eventBuilder.setEventType(ProvenanceEventType.DROP);
    eventBuilder.setComponentId(getIdentifier());
    eventBuilder.setComponentType("Connection");
    eventBuilder.setSourceQueueIdentifier(getIdentifier());
    eventBuilder.setLineageStartDate(flowFile.getLineageStartDate());
    eventBuilder.setAttributes(flowFile.getAttributes(), Collections.emptyMap());
    eventBuilder.setDetails("FlowFile Queue emptied by " + requestor);

    final ContentClaim claim = flowFile.getContentClaim();
    if (claim != null) {
        // Record where the dropped FlowFile's content lived so the event can still locate it.
        final ResourceClaim resource = claim.getResourceClaim();
        eventBuilder.setPreviousContentClaim(resource.getContainer(), resource.getSection(), resource.getId(), claim.getOffset(), flowFile.getSize());
    }

    return eventBuilder.build();
}
.addParentUuid(parentUUID) .setFlowFileUUID(parentUUID) .setAttributes(Collections.emptyMap(), flowFileRecord.getAttributes()) .setCurrentContentClaim(event.getContentClaimContainer(), event.getContentClaimSection(), event.getContentClaimIdentifier(), event.getContentClaimOffset(), event.getFileSize()) .setDetails("Replay requested by " + user.getIdentity())
/**
 * Creates a FlowFileDTO from the specified FlowFileRecord.
 *
 * @param record record
 * @return dto
 */
public FlowFileDTO createFlowFileDTO(final FlowFileRecord record) {
    // Capture one timestamp (avoiding legacy java.util.Date) so queued duration and
    // lineage age are computed against the same instant.
    final long now = System.currentTimeMillis();

    final FlowFileDTO dto = new FlowFileDTO();
    dto.setUuid(record.getAttribute(CoreAttributes.UUID.key()));
    dto.setFilename(record.getAttribute(CoreAttributes.FILENAME.key()));
    dto.setPenalized(record.isPenalized());
    dto.setSize(record.getSize());
    dto.setAttributes(record.getAttributes());
    dto.setQueuedDuration(now - record.getLastQueueDate());
    dto.setLineageDuration(now - record.getLineageStartDate());

    final ContentClaim contentClaim = record.getContentClaim();
    if (contentClaim != null) {
        // Expose where the FlowFile's content lives; the DTO offset is the absolute position
        // within the resource claim (claim offset + this FlowFile's offset into the claim).
        final ResourceClaim resourceClaim = contentClaim.getResourceClaim();
        dto.setContentClaimSection(resourceClaim.getSection());
        dto.setContentClaimContainer(resourceClaim.getContainer());
        dto.setContentClaimIdentifier(resourceClaim.getId());
        dto.setContentClaimOffset(contentClaim.getOffset() + record.getContentClaimOffset());
        dto.setContentClaimFileSizeBytes(record.getSize());
        dto.setContentClaimFileSize(FormatUtils.formatDataSize(record.getSize()));
    }

    return dto;
}
return record.getSwapLocation(); case FlowFileSchema.ATTRIBUTES: return flowFile.getAttributes(); case FlowFileSchema.ENTRY_DATE: return flowFile.getEntryDate();
/**
 * Transitions this record to the given type. If the record is leaving the CREATE state,
 * the current attributes are snapshotted into {@code updatedAttributes} first.
 *
 * @param newType the type to transition to; a no-op if it equals the current type
 */
private void setType(final RepositoryRecordType newType) {
    if (newType == this.type) {
        return;
    }
    if (this.type == RepositoryRecordType.CREATE) {
        // Because we don't copy updated attributes to `this.updatedAttributes` for CREATE records, we need to ensure
        // that if a record is changed from CREATE to anything else that we do properly update the `this.updatedAttributes` field.
        this.updatedAttributes = new HashMap<>(getCurrent().getAttributes());
    }
    this.type = newType;
}
public StandardRepositoryRecord(final FlowFileQueue originalQueue, final FlowFileRecord originalFlowFileRecord, final String swapLocation) { this.originalQueue = originalQueue; this.originalFlowFileRecord = originalFlowFileRecord; setType(RepositoryRecordType.SWAP_OUT); this.swapLocation = swapLocation; this.originalAttributes = originalFlowFileRecord == null ? Collections.emptyMap() : originalFlowFileRecord.getAttributes(); }
/**
 * Returns the set of attributes considered "updated" for this record: all current
 * attributes for CREATE records, otherwise the tracked delta (empty if none).
 *
 * @return the updated attributes; never {@code null}
 */
Map<String, String> getUpdatedAttributes() {
    // CREATE records never populate updatedAttributes, so report everything as updated.
    if (type == RepositoryRecordType.CREATE) {
        return getCurrent().getAttributes();
    }
    return (updatedAttributes != null) ? updatedAttributes : Collections.emptyMap();
}
/**
 * Resolves a FlowFile schema field name to the corresponding value from the wrapped
 * FlowFile record.
 *
 * @param fieldName the schema field name to look up
 * @return the field's value, or {@code null} if the field name is not recognized
 */
@Override
public Object getFieldValue(final String fieldName) {
    switch (fieldName) {
        case FlowFileSchema.ATTRIBUTES:
            return flowFile.getAttributes();
        case FlowFileSchema.CONTENT_CLAIM:
            return contentClaim;
        case FlowFileSchema.ENTRY_DATE:
            return flowFile.getEntryDate();
        case FlowFileSchema.FLOWFILE_SIZE:
            return flowFile.getSize();
        case FlowFileSchema.LINEAGE_START_DATE:
            return flowFile.getLineageStartDate();
        case FlowFileSchema.LINEAGE_START_INDEX:
            return flowFile.getLineageStartIndex();
        case FlowFileSchema.QUEUE_DATE:
            return flowFile.getLastQueueDate();
        case FlowFileSchema.QUEUE_DATE_INDEX:
            return flowFile.getQueueDateIndex();
        case FlowFileSchema.RECORD_ID:
            return flowFile.getId();
    }
    // Unknown field names resolve to null rather than throwing.
    return null;
}