/**
 * Queues an EntityCreateRequest for the given entities in the current lineage context.
 *
 * @param entities the entities to be created in Atlas
 */
protected void createEntity(Referenceable... entities) {
    lineageContext.addMessage(new HookNotification.EntityCreateRequest(NIFI_USER, entities));
}
// NOTE(review): this span is a garbled/incomplete fragment of a message-consolidation
// routine (the stream source for .flatMap and several enclosing scopes are missing from
// this view). Comments below annotate the visible pieces only — do not treat as compilable.
// Record how many raw hook messages arrived before consolidation.
metrics.totalMessages = messages.size();
// Split messages into ENTITY_CREATE requests (key=true) and everything else (key=false).
final Map<Boolean, List<HookNotification.HookNotificationMessage>> createAndOthers = messages.stream().collect(groupingBy(msg -> ENTITY_CREATE.equals(msg.getType())));
// Fragment: flattens create requests into their entities, then partitions them by
// whether the entity is a nifi_flow_path (key=true) or some other DataSet type.
.flatMap(msg -> ((HookNotification.EntityCreateRequest) msg).getEntities().stream())
.collect(groupingBy(ref -> TYPE_NIFI_FLOW_PATH.equals(ref.typeName)));
// Merge flow-path entities into the combined create batch and send one create request.
newEntities.addAll(newFlowPaths); if (!newEntities.isEmpty()) { notifier.accept(Collections.singletonList(new HookNotification.EntityCreateRequest(NIFI_USER, newEntities)));
// Fragment of a predicate selecting partial updates that target a nifi_flow_path
// keyed by its qualified name.
-> ENTITY_PARTIAL_UPDATE.equals(msg.getType()) && TYPE_NIFI_FLOW_PATH.equals(((HookNotification.EntityPartialUpdateRequest)msg).getTypeName()) && ATTR_QUALIFIED_NAME.equals(((HookNotification.EntityPartialUpdateRequest)msg).getAttribute()) ));
// Deduplicate input references across messages for the same flow path (first one wins).
fromReferenceable(msg.getEntity().get(ATTR_INPUTS), metrics) .entrySet().stream().filter(ref -> !distinctInputs.containsKey(ref.getKey())) .forEach(ref -> distinctInputs.put(ref.getKey(), ref.getValue()));
// Deduplicate output references the same way.
fromReferenceable(msg.getEntity().get(ATTR_OUTPUTS), metrics) .entrySet().stream().filter(ref -> !distinctOutputs.containsKey(ref.getKey())) .forEach(ref -> distinctOutputs.put(ref.getKey(), ref.getValue()));
// Emit a single consolidated partial update per flow path qualified name.
return new HookNotification.EntityPartialUpdateRequest(NIFI_USER, TYPE_NIFI_FLOW_PATH, ATTR_QUALIFIED_NAME, flowPathQualifiedName, flowPathRef); })
/**
 * Attaches the input and output DataSet references to the given flow path entity and,
 * if anything new was attached, queues a partial-update notification for the flow path
 * keyed by its qualified name.
 *
 * @param dataSetRefs input/output DataSet references to attach
 * @param flowPathRef the nifi_flow_path Referenceable to update
 */
protected void addDataSetRefs(DataSetRefs dataSetRefs, Referenceable flowPathRef) {
    // Both calls must run: each may mutate flowPathRef independently.
    final boolean anyInputAdded = addDataSetRefs(dataSetRefs.getInputs(), flowPathRef, ATTR_INPUTS);
    final boolean anyOutputAdded = addDataSetRefs(dataSetRefs.getOutputs(), flowPathRef, ATTR_OUTPUTS);
    if (!anyInputAdded && !anyOutputAdded) {
        return;
    }
    final String flowPathQualifiedName = (String) flowPathRef.get(ATTR_QUALIFIED_NAME);
    lineageContext.addMessage(new HookNotification.EntityPartialUpdateRequest(
            NIFI_USER, TYPE_NIFI_FLOW_PATH, ATTR_QUALIFIED_NAME, flowPathQualifiedName, flowPathRef));
}
// NOTE(review): fragment — the enclosing if/method is outside this view.
// Queue a full-entity update containing a snapshot copy of the collected entities.
event.addMessage(new HookNotification.EntityUpdateRequest(event.getUser(), new ArrayList<>(entities))); } else {
// No lineage recorded: the query yielded no inputs or outputs.
LOG.info("Skipped query {} since it has no getInputs() or resulting getOutputs()", event.getQueryStr());
/**
 * Queues an entity-delete notification for the Hive table referenced by the given
 * write entity, identified by its cluster-qualified name.
 *
 * @param dgiBridge bridge supplying the cluster name used in the qualified name
 * @param event     Hive event the delete message is attached to
 * @param output    write entity whose table is being dropped
 */
private void deleteTable(HiveMetaStoreBridge dgiBridge, HiveEventContext event, WriteEntity output) {
    final String qualifiedName =
            HiveMetaStoreBridge.getTableQualifiedName(dgiBridge.getClusterName(), output.getTable());
    LOG.info("Deleting table {} ", qualifiedName);
    final HookNotification.EntityDeleteRequest deleteRequest = new HookNotification.EntityDeleteRequest(
            event.getUser(),
            HiveDataTypes.HIVE_TABLE.getName(),
            AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
            qualifiedName);
    event.addMessage(deleteRequest);
}
/** * Atlas notification types. */ enum NotificationType { // Notifications from the Atlas integration hooks. HOOK(HOOK_NOTIFICATION_CLASS, new HookMessageDeserializer()), // Notifications to entity change consumers. ENTITIES(ENTITY_NOTIFICATION_CLASS, new EntityMessageDeserializer()); /** * The notification class associated with this type. */ private final Class classType; /** * The message deserializer for this type. */ private final MessageDeserializer deserializer; NotificationType(Class classType, MessageDeserializer<?> deserializer) { this.classType = classType; this.deserializer = deserializer; } // ----- accessors --------------------------------------------------- public Class getClassType() { return classType; } public MessageDeserializer getDeserializer() { return deserializer; } }
// NOTE(review): this span is a garbled/incomplete fragment of a message-consolidation
// routine (the stream source for .flatMap and several enclosing scopes are missing from
// this view). Comments below annotate the visible pieces only — do not treat as compilable.
// Record how many raw hook messages arrived before consolidation.
metrics.totalMessages = messages.size();
// Split messages into ENTITY_CREATE requests (key=true) and everything else (key=false).
final Map<Boolean, List<HookNotification.HookNotificationMessage>> createAndOthers = messages.stream().collect(groupingBy(msg -> ENTITY_CREATE.equals(msg.getType())));
// Fragment: flattens create requests into their entities, then partitions them by
// whether the entity is a nifi_flow_path (key=true) or some other DataSet type.
.flatMap(msg -> ((HookNotification.EntityCreateRequest) msg).getEntities().stream())
.collect(groupingBy(ref -> TYPE_NIFI_FLOW_PATH.equals(ref.typeName)));
// Merge flow-path entities into the combined create batch and send one create request.
newEntities.addAll(newFlowPaths); if (!newEntities.isEmpty()) { notifier.accept(Collections.singletonList(new HookNotification.EntityCreateRequest(NIFI_USER, newEntities)));
// Fragment of a predicate selecting partial updates that target a nifi_flow_path
// keyed by its qualified name.
-> ENTITY_PARTIAL_UPDATE.equals(msg.getType()) && TYPE_NIFI_FLOW_PATH.equals(((HookNotification.EntityPartialUpdateRequest)msg).getTypeName()) && ATTR_QUALIFIED_NAME.equals(((HookNotification.EntityPartialUpdateRequest)msg).getAttribute()) ));
// Deduplicate input references across messages for the same flow path (first one wins).
fromReferenceable(msg.getEntity().get(ATTR_INPUTS), metrics) .entrySet().stream().filter(ref -> !distinctInputs.containsKey(ref.getKey())) .forEach(ref -> distinctInputs.put(ref.getKey(), ref.getValue()));
// Deduplicate output references the same way.
fromReferenceable(msg.getEntity().get(ATTR_OUTPUTS), metrics) .entrySet().stream().filter(ref -> !distinctOutputs.containsKey(ref.getKey())) .forEach(ref -> distinctOutputs.put(ref.getKey(), ref.getValue()));
// Emit a single consolidated partial update per flow path qualified name.
return new HookNotification.EntityPartialUpdateRequest(NIFI_USER, TYPE_NIFI_FLOW_PATH, ATTR_QUALIFIED_NAME, flowPathQualifiedName, flowPathRef); })
/**
 * Attaches the input and output DataSet references to the given flow path entity and,
 * if anything new was attached, queues a partial-update notification for the flow path
 * keyed by its qualified name.
 *
 * @param dataSetRefs input/output DataSet references to attach
 * @param flowPathRef the nifi_flow_path Referenceable to update
 */
protected void addDataSetRefs(DataSetRefs dataSetRefs, Referenceable flowPathRef) {
    // Both calls run unconditionally; each may mutate flowPathRef.
    final boolean inputsAdded = addDataSetRefs(dataSetRefs.getInputs(), flowPathRef, ATTR_INPUTS);
    final boolean outputsAdded = addDataSetRefs(dataSetRefs.getOutputs(), flowPathRef, ATTR_OUTPUTS);
    if (inputsAdded || outputsAdded) {
        // Only notify Atlas when the flow path actually gained a reference.
        lineageContext.addMessage(new HookNotification.EntityPartialUpdateRequest(NIFI_USER, TYPE_NIFI_FLOW_PATH, ATTR_QUALIFIED_NAME, (String) flowPathRef.get(ATTR_QUALIFIED_NAME), flowPathRef));
    }
}
@SuppressWarnings("unchecked") protected boolean addDataSetRefs(Set<Referenceable> refsToAdd, Referenceable nifiFlowPath, String targetAttribute) { if (refsToAdd != null && !refsToAdd.isEmpty()) { // If nifiFlowPath already has a given dataSetRef, then it needs not to be created. final Function<Referenceable, String> toTypedQualifiedName = ref -> toTypedQualifiedName(ref.getTypeName(), toStr(ref.get(ATTR_QUALIFIED_NAME))); final Collection<Referenceable> refs = Optional.ofNullable((Collection<Referenceable>) nifiFlowPath.get(targetAttribute)).orElseGet(ArrayList::new); final Set<String> existingRefTypedQualifiedNames = refs.stream().map(toTypedQualifiedName).collect(Collectors.toSet()); refsToAdd.stream().filter(ref -> !existingRefTypedQualifiedNames.contains(toTypedQualifiedName.apply(ref))) .forEach(ref -> { if (ref.getId().isUnassigned()) { // Create new entity. logger.debug("Found a new DataSet reference from {} to {}, sending an EntityCreateRequest", new Object[]{toTypedQualifiedName.apply(nifiFlowPath), toTypedQualifiedName.apply(ref)}); final HookNotification.EntityCreateRequest createDataSet = new HookNotification.EntityCreateRequest(NIFI_USER, ref); lineageContext.addMessage(createDataSet); } refs.add(ref); }); if (refs.size() > existingRefTypedQualifiedNames.size()) { // Something has been added. nifiFlowPath.set(targetAttribute, refs); return true; } } return false; }
// NOTE(review): fragment — the enclosing method is not visible in this chunk.
// Queue a full-entity update for the collected entities, attributed to the event's user.
event.addMessage(new HookNotification.EntityUpdateRequest(event.getUser(), entities));
/**
 * Queues delete notifications for a database drop. With cascade, the event's outputs
 * also list the tables being removed, each of which gets its own delete message.
 *
 * @param dgiBridge bridge supplying the cluster name for qualified names
 * @param event     Hive event whose outputs describe the dropped objects
 */
private void deleteDatabase(HiveMetaStoreBridge dgiBridge, HiveEventContext event) {
    // More than one output implies tables are being dropped along with the database.
    final boolean cascade = event.getOutputs().size() > 1;
    if (cascade) {
        LOG.info("Starting deletion of tables and databases with cascade {} ", event.getQueryStr());
    } else {
        LOG.info("Starting deletion of database {} ", event.getQueryStr());
    }
    for (WriteEntity droppedEntity : event.getOutputs()) {
        final Type entityType = droppedEntity.getType();
        if (Type.TABLE.equals(entityType)) {
            deleteTable(dgiBridge, event, droppedEntity);
        } else if (Type.DATABASE.equals(entityType)) {
            final String dbQualifiedName = HiveMetaStoreBridge.getDBQualifiedName(
                    dgiBridge.getClusterName(), droppedEntity.getDatabase().getName());
            event.addMessage(new HookNotification.EntityDeleteRequest(
                    event.getUser(),
                    HiveDataTypes.HIVE_DB.getName(),
                    AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME,
                    dbQualifiedName));
        }
    }
}
private Referenceable replaceTableQFName(HiveEventContext event, Table oldTable, Table newTable, final Referenceable tableEntity, final String oldTableQFName, final String newTableQFName) throws HiveException { tableEntity.set(AtlasClient.NAME, oldTable.getTableName().toLowerCase()); tableEntity.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, oldTableQFName); //Replace table entity with new name final Referenceable newEntity = new Referenceable(HiveDataTypes.HIVE_TABLE.getName()); newEntity.set(AtlasClient.NAME, newTable.getTableName().toLowerCase()); newEntity.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, newTableQFName); ArrayList<String> alias_list = new ArrayList<>(); alias_list.add(oldTable.getTableName().toLowerCase()); newEntity.set(HiveMetaStoreBridge.TABLE_ALIAS_LIST, alias_list); event.addMessage(new HookNotification.EntityPartialUpdateRequest(event.getUser(), HiveDataTypes.HIVE_TABLE.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, oldTableQFName, newEntity)); return newEntity; }
/**
 * Wraps the given entities in a single create request and queues it on the
 * lineage context for later notification to Atlas.
 *
 * @param entities the entities to be created in Atlas
 */
protected void createEntity(Referenceable... entities) {
    final HookNotification.EntityCreateRequest createRequest =
            new HookNotification.EntityCreateRequest(NIFI_USER, entities);
    lineageContext.addMessage(createRequest);
}
// NOTE(review): fragment — the enclosing method is not visible in this chunk.
// Queue a full-entity update for the collected entities, attributed to the event's user.
event.addMessage(new HookNotification.EntityUpdateRequest(event.getUser(), entities));
private List<Referenceable> replaceColumnQFName(final HiveEventContext event, final List<Referenceable> cols, final String oldTableQFName, final String newTableQFName) { List<Referenceable> newColEntities = new ArrayList<>(); for (Referenceable col : cols) { final String colName = (String) col.get(AtlasClient.NAME); String oldColumnQFName = HiveMetaStoreBridge.getColumnQualifiedName(oldTableQFName, colName); String newColumnQFName = HiveMetaStoreBridge.getColumnQualifiedName(newTableQFName, colName); col.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, oldColumnQFName); Referenceable newColEntity = new Referenceable(HiveDataTypes.HIVE_COLUMN.getName()); ///Only QF Name changes newColEntity.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, newColumnQFName); event.addMessage(new HookNotification.EntityPartialUpdateRequest(event.getUser(), HiveDataTypes.HIVE_COLUMN.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, oldColumnQFName, newColEntity)); newColEntities.add(newColEntity); } return newColEntities; }
/**
 * Translates a Falcon event into Atlas hook notification messages and sends them.
 *
 * @param event the Falcon event to process
 * @throws FalconException    if building the entities fails
 * @throws URISyntaxException if entity URIs cannot be constructed
 */
private void fireAndForget(FalconEvent event) throws FalconException, URISyntaxException {
    LOG.info("Entered Atlas hook for Falcon hook operation {}", event.getOperation());

    final Operation op = getOperation(event.getOperation());
    final String user = getUser(event.getUser());
    LOG.info("fireAndForget user:{}", user);

    final List<HookNotification> messages = new ArrayList<>();
    switch (op) {
        case ADD:
            messages.add(new EntityCreateRequest(user, createEntities(event, user)));
            break;
    }
    notifyEntities(messages, null);
}
private Referenceable replaceSDQFName(final HiveEventContext event, Referenceable tableEntity, final String oldTblQFName, final String newTblQFName) { //Reset storage desc QF Name to old Name final Referenceable sdRef = ((Referenceable) tableEntity.get(HiveMetaStoreBridge.STORAGE_DESC)); sdRef.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, HiveMetaStoreBridge.getStorageDescQFName(oldTblQFName)); //Replace SD QF name fir st to retain tags final String oldSDQFName = HiveMetaStoreBridge.getStorageDescQFName(oldTblQFName); final String newSDQFName = HiveMetaStoreBridge.getStorageDescQFName(newTblQFName); final Referenceable newSDEntity = new Referenceable(HiveDataTypes.HIVE_STORAGEDESC.getName()); newSDEntity.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, newSDQFName); event.addMessage(new HookNotification.EntityPartialUpdateRequest(event.getUser(), HiveDataTypes.HIVE_STORAGEDESC.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, oldSDQFName, newSDEntity)); return newSDEntity; }
/**
 * Adds DataSet references to the named attribute (inputs or outputs) of a flow path,
 * skipping any reference the flow path already holds (identified by type name plus
 * qualified name). References whose id is still unassigned are additionally queued as
 * EntityCreateRequests so Atlas can create the corresponding entities.
 *
 * @param refsToAdd       candidate DataSet references; may be null or empty
 * @param nifiFlowPath    the flow path entity being updated
 * @param targetAttribute name of the attribute to update on the flow path
 * @return true if at least one reference was added
 */
@SuppressWarnings("unchecked")
protected boolean addDataSetRefs(Set<Referenceable> refsToAdd, Referenceable nifiFlowPath, String targetAttribute) {
    if (refsToAdd != null && !refsToAdd.isEmpty()) {
        // If nifiFlowPath already has a given dataSetRef, then it needs not to be created.
        final Function<Referenceable, String> toTypedQualifiedName = ref -> toTypedQualifiedName(ref.getTypeName(), toStr(ref.get(ATTR_QUALIFIED_NAME)));
        // Reuse the flow path's existing reference collection, or start a fresh one.
        final Collection<Referenceable> refs = Optional.ofNullable((Collection<Referenceable>) nifiFlowPath.get(targetAttribute)).orElseGet(ArrayList::new);
        final Set<String> existingRefTypedQualifiedNames = refs.stream().map(toTypedQualifiedName).collect(Collectors.toSet());
        refsToAdd.stream().filter(ref -> !existingRefTypedQualifiedNames.contains(toTypedQualifiedName.apply(ref)))
                .forEach(ref -> {
                    if (ref.getId().isUnassigned()) {
                        // Create new entity.
                        logger.debug("Found a new DataSet reference from {} to {}, sending an EntityCreateRequest", new Object[]{toTypedQualifiedName.apply(nifiFlowPath), toTypedQualifiedName.apply(ref)});
                        final HookNotification.EntityCreateRequest createDataSet = new HookNotification.EntityCreateRequest(NIFI_USER, ref);
                        lineageContext.addMessage(createDataSet);
                    }
                    refs.add(ref);
                });
        // Compare against the pre-add count to detect whether anything new landed.
        if (refs.size() > existingRefTypedQualifiedNames.size()) {
            // Something has been added.
            nifiFlowPath.set(targetAttribute, refs);
            return true;
        }
    }
    return false;
}
// NOTE(review): fragment from the column-rename loop — enclosing method not visible here.
// Assign the post-rename qualified name to the replacement column entity.
newColEntity.set(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, newColumnQFName);
// Queue a partial update mapping the old column qualified name to the replacement entity.
event.addMessage(new HookNotification.EntityPartialUpdateRequest(event.getUser(), HiveDataTypes.HIVE_COLUMN.getName(), AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, oldColumnQFName, newColEntity));