public Tuple<AtlasObjectId, AtlasEntity> getOrCreateQueue(String destinationComponentId) {
    final String qualifiedName = toQualifiedName(destinationComponentId);
    final Optional<AtlasObjectId> existingQueueId = findIdByQualifiedName(queues.keySet(), qualifiedName);

    if (existingQueueId.isPresent()) {
        final AtlasEntity entity = queues.get(existingQueueId.get());
        stillExistingEntityGuids.add(entity.getGuid());
        return new Tuple<>(existingQueueId.get(), entity);
    } else {
        final AtlasObjectId queueId = new AtlasObjectId(TYPE_NIFI_QUEUE, ATTR_QUALIFIED_NAME, qualifiedName);
        final AtlasEntity queue = new AtlasEntity(TYPE_NIFI_QUEUE);
        queue.setAttribute(ATTR_NIFI_FLOW, getAtlasObjectId());
        queue.setAttribute(ATTR_QUALIFIED_NAME, qualifiedName);
        queue.setAttribute(ATTR_NAME, "queue");
        queue.setAttribute(ATTR_DESCRIPTION, "Input queue for " + destinationComponentId);
        queues.put(queueId, queue);
        return new Tuple<>(queueId, queue);
    }
}
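// Illustrative usage only (not from the original source): the destination component id is a
// NiFi component GUID, and calling the method twice with the same id reuses the same queue
// entity. The GUID value below is made up.
final Tuple<AtlasObjectId, AtlasEntity> queue = getOrCreateQueue("52b0b219-0161-1000-0000-000000000000");
final AtlasObjectId queueId     = queue.getKey();
final AtlasEntity   queueEntity = queue.getValue();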
private AtlasEntity createOrUpdateRootGroupPortEntity(boolean isInput, String qualifiedName, String portName) {
    final Map<AtlasObjectId, AtlasEntity> ports = isInput ? rootInputPortEntities : rootOutputPortEntities;
    final Optional<AtlasObjectId> existingPortId = findIdByQualifiedName(ports.keySet(), qualifiedName);
    final String typeName = isInput ? TYPE_NIFI_INPUT_PORT : TYPE_NIFI_OUTPUT_PORT;

    if (existingPortId.isPresent()) {
        final AtlasEntity entity = ports.get(existingPortId.get());
        final String portGuid = entity.getGuid();
        stillExistingEntityGuids.add(portGuid);

        final Object currentName = entity.getAttribute(ATTR_NAME);
        if (isUpdated(currentName, portName)) {
            // Update port name and set updated flag.
            entity.setAttribute(ATTR_NAME, portName);
            updatedEntityGuids.add(portGuid);
            updateAudit.add(String.format("Name of %s %s changed from %s to %s", entity.getTypeName(), portGuid, currentName, portName));
        }
        return entity;
    } else {
        final AtlasEntity entity = new AtlasEntity(typeName);
        entity.setAttribute(ATTR_NIFI_FLOW, getAtlasObjectId());
        entity.setAttribute(ATTR_NAME, portName);
        entity.setAttribute(ATTR_QUALIFIED_NAME, qualifiedName);

        final AtlasObjectId portId = new AtlasObjectId(typeName, ATTR_QUALIFIED_NAME, qualifiedName);
        ports.put(portId, entity);
        return entity;
    }
}
private Tuple<EntityChangeType, AtlasEntity> toAtlasEntity(EntityChangeType changeType, final NiFiFlowPath path) {
    final AtlasEntity entity = EntityChangeType.CREATED.equals(changeType) ? new AtlasEntity() : new AtlasEntity(path.getExEntity());
    entity.setTypeName(TYPE_NIFI_FLOW_PATH);
    entity.setVersion(1L);
    entity.setAttribute(ATTR_NIFI_FLOW, getAtlasObjectId());

    final StringBuilder name = new StringBuilder();
    final StringBuilder description = new StringBuilder();
    path.getProcessComponentIds().forEach(pid -> {
        final String componentName = getProcessComponentName(pid);
        if (name.length() > 0) {
            name.append(", ");
            description.append(", ");
        }
        name.append(componentName);
        description.append(String.format("%s::%s", componentName, pid));
    });

    path.setName(name.toString());
    entity.setAttribute(ATTR_NAME, name.toString());
    entity.setAttribute(ATTR_DESCRIPTION, description.toString());

    // Use the first processor's id as the qualifiedName.
    entity.setAttribute(ATTR_QUALIFIED_NAME, toQualifiedName(path.getId()));
    entity.setAttribute(ATTR_URL, path.createDeepLinkURL(getUrl()));

    final boolean inputsChanged = setChangedIOIds(path, entity, true);
    final boolean outputsChanged = setChangedIOIds(path, entity, false);

    // Even if no flow path metadata has changed, the path still needs to be updated when any of its inputs or outputs changed.
    final EntityChangeType finalChangeType = EntityChangeType.AS_IS.equals(changeType)
            ? (path.isMetadataUpdated() || inputsChanged || outputsChanged ? EntityChangeType.UPDATED : EntityChangeType.AS_IS)
            : changeType;

    return new Tuple<>(finalChangeType, entity);
}
private AtlasEntity registerNiFiFlowEntity(final NiFiFlow nifiFlow) throws AtlasServiceException {
    final List<AtlasEntity> entities = new ArrayList<>();
    final AtlasEntity.AtlasEntitiesWithExtInfo atlasEntities = new AtlasEntity.AtlasEntitiesWithExtInfo(entities);

    if (!nifiFlow.isMetadataUpdated()) {
        // Nothing has changed; return the existing entity.
        return nifiFlow.getExEntity();
    }

    // Create the parent flow entity from the existing NiFiFlow entity if available, so that common properties are carried over.
    final AtlasEntity flowEntity = nifiFlow.getExEntity() != null ? new AtlasEntity(nifiFlow.getExEntity()) : new AtlasEntity();
    flowEntity.setTypeName(TYPE_NIFI_FLOW);
    flowEntity.setVersion(1L);
    flowEntity.setAttribute(ATTR_NAME, nifiFlow.getFlowName());
    flowEntity.setAttribute(ATTR_QUALIFIED_NAME, nifiFlow.toQualifiedName(nifiFlow.getRootProcessGroupId()));
    flowEntity.setAttribute(ATTR_URL, nifiFlow.getUrl());
    flowEntity.setAttribute(ATTR_DESCRIPTION, nifiFlow.getDescription());

    // If flowEntity has not been persisted yet, store the nifi_flow entity first so that its id is available to other entities.
    if (flowEntity.getGuid().startsWith("-")) {
        entities.add(flowEntity);

        final EntityMutationResponse mutationResponse = atlasClient.createEntities(atlasEntities);
        logger.debug("Registered a new nifi_flow entity, mutation response={}", mutationResponse);

        final String assignedNiFiFlowGuid = mutationResponse.getGuidAssignments().get(flowEntity.getGuid());
        flowEntity.setGuid(assignedNiFiFlowGuid);
        nifiFlow.setAtlasGuid(assignedNiFiFlowGuid);
    }

    return flowEntity;
}
@Override
public AtlasEntity createDefaultValue() {
    AtlasEntity ret = new AtlasEntity(entityDef.getName());

    populateDefaultValues(ret);

    return ret;
}
@Override
public AtlasEntity createDefaultValue(Object defaultValue) {
    AtlasEntity ret = new AtlasEntity(entityDef.getName());

    populateDefaultValues(ret);

    return ret;
}
private AtlasEntity getHiveTableAtlasEntity() {
    AtlasEntity entity = new AtlasEntity("hive_table");

    Map<String, Object> attributes = new HashMap<>();
    attributes.put(qualifiedName, "TABLE1.default" + lowerCaseCL1);
    attributes.put("dbname", "someDB");
    attributes.put("name", "somename");

    entity.setAttributes(attributes);
    return entity;
}
private AtlasEntity toHiveDatabaseEntity(String clusterName, String dbName) {
    AtlasEntity entHiveDb = new AtlasEntity(HiveDataTypes.HIVE_DB.getName());
    String qualifiedName = HiveMetaStoreBridge.getDBQualifiedName(clusterName, dbName);

    entHiveDb.setAttribute(AtlasConstants.CLUSTER_NAME_ATTRIBUTE, clusterName);
    entHiveDb.setAttribute(AtlasClient.NAME, dbName);
    entHiveDb.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, qualifiedName);

    return entHiveDb;
}
AtlasEntity createStorageDescriptor(String location, String inputFormat, String outputFormat, boolean compressed) throws Exception {
    AtlasEntity entity = new AtlasEntity(STORAGE_DESC_TYPE);
    entity.setAttribute("location", location);
    entity.setAttribute("inputFormat", inputFormat);
    entity.setAttribute("outputFormat", outputFormat);
    entity.setAttribute("compressed", compressed);

    return createInstance(entity, null);
}
@Test
public void testAddEventsV2() throws Exception {
    EntityAuditEventV2 event = new EntityAuditEventV2(rand(), System.currentTimeMillis(), "u1",
            EntityAuditEventV2.EntityAuditActionV2.ENTITY_CREATE, "d1", new AtlasEntity(rand()));

    eventRepository.putEventsV2(event);

    List<EntityAuditEventV2> events = eventRepository.listEventsV2(event.getEntityId(), null, (short) 10);
    assertEquals(events.size(), 1);
    assertEventV2Equals(events.get(0), event);
}
private AtlasEntity toHiveTableEntity(AtlasEntity entHiveDb, String tableName) {
    AtlasEntity entHiveTable = new AtlasEntity(HiveDataTypes.HIVE_TABLE.getName());
    String qualifiedName = HiveMetaStoreBridge.getTableQualifiedName((String) entHiveDb.getAttribute(AtlasConstants.CLUSTER_NAME_ATTRIBUTE),
                                                                     (String) entHiveDb.getAttribute(AtlasClient.NAME), tableName);

    entHiveTable.setAttribute(AtlasClient.NAME, tableName.toLowerCase());
    entHiveTable.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, qualifiedName);
    entHiveTable.setAttribute(ATTRIBUTE_DB, AtlasTypeUtil.getAtlasObjectId(entHiveDb));

    return entHiveTable;
}
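// Illustrative usage only (not from the original source): the database entity returned by
// toHiveDatabaseEntity() is passed into toHiveTableEntity(), which records a reference to it
// as an AtlasObjectId under ATTRIBUTE_DB. The cluster and object names below are made up.
AtlasEntity dbEntity    = toHiveDatabaseEntity("cl1", "sales");
AtlasEntity tableEntity = toHiveTableEntity(dbEntity, "ORDERS");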
protected AtlasEntity column(String name, String dataType, String comment, String... traitNames) {
    AtlasEntity column = new AtlasEntity(COLUMN_TYPE);
    column.setAttribute("name", name);
    column.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, "qualified:" + name);
    column.setAttribute("type", dataType);
    column.setAttribute("comment", comment);
    column.setClassifications(Stream.of(traitNames).map(AtlasClassification::new).collect(Collectors.toList()));

    return column;
}
@Test(expectedExceptions = AtlasBaseException.class)
public void testCreateRequiredAttrNull() throws Exception {
    // Create a table entity whose required attribute is left null.
    Map<String, AtlasEntity> tableCloneMap = new HashMap<>();
    AtlasEntity tableEntity = new AtlasEntity(TABLE_TYPE);
    tableEntity.setAttribute(TestUtilsV2.NAME, "table_" + TestUtilsV2.randomString());
    tableCloneMap.put(tableEntity.getGuid(), tableEntity);

    entityStore.createOrUpdate(new InMemoryMapEntityStream(tableCloneMap), false);
    Assert.fail("Expected exception while creating with required attribute null");
}
AtlasEntity createColumn(String name, String dataType, String comment, String... traitNames) throws Exception {
    AtlasEntity entity = new AtlasEntity(COLUMN_TYPE);
    entity.setClassifications(toAtlasClassifications(traitNames));
    entity.setAttribute("name", name);
    entity.setAttribute("dataType", dataType);
    entity.setAttribute("comment", comment);

    return createInstance(entity, traitNames);
}
AtlasEntity database(String name, String description, String owner, String locationUri, String... traitNames) {
    AtlasEntity database = new AtlasEntity(DATABASE_TYPE);
    database.setAttribute("name", name);
    database.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, "qualified:" + name);
    database.setAttribute("description", description);
    database.setAttribute("owner", owner);
    database.setAttribute("locationUri", locationUri);
    database.setAttribute("createTime", System.currentTimeMillis());
    database.setAttribute("clusterName", "cl1");
    database.setClassifications(Stream.of(traitNames).map(AtlasClassification::new).collect(Collectors.toList()));

    return database;
}
public static AtlasEntity createProcessEntity(List<AtlasObjectId> inputs, List<AtlasObjectId> outputs) {
    AtlasEntity entity = new AtlasEntity(PROCESS_TYPE);
    entity.setAttribute(NAME, RandomStringUtils.randomAlphanumeric(10));
    entity.setAttribute("inputs", inputs);
    entity.setAttribute("outputs", outputs);

    return entity;
}
private AtlasEntity getHiveDbEntity(String clusterName, String dbName) {
    AtlasEntity entity = new AtlasEntity(TransformationConstants.HIVE_DATABASE);
    entity.setAttribute("name", dbName);
    entity.setAttribute("qualifiedName", dbName + "@" + clusterName);
    entity.setAttribute("location", "hdfs://localhost.localdomain:8020/warehouse/tablespace/managed/hive/" + dbName + ".db");
    entity.setAttribute("clusterName", clusterName);
    entity.setAttribute("owner", "hive");
    entity.setAttribute("ownerType", "USER");

    return entity;
}
private AtlasEntity getHiveStorageDescriptorEntity(String clusterName, String dbName, String tableName) {
    // This HDFS path is used as the storage location, not as the qualifiedName attribute.
    String location = "hdfs://localhost.localdomain:8020/warehouse/tablespace/managed/hive/" + dbName + ".db/" + tableName;

    AtlasEntity entity = new AtlasEntity(TransformationConstants.HIVE_STORAGE_DESCRIPTOR);
    entity.setAttribute("qualifiedName", dbName + "." + tableName + "@" + clusterName + "_storage");
    entity.setAttribute("storedAsSubDirectories", false);
    entity.setAttribute("location", location);
    entity.setAttribute("compressed", false);
    entity.setAttribute("inputFormat", "org.apache.hadoop.mapred.TextInputFormat");
    entity.setAttribute("outputFormat", "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat");
    entity.setAttribute("numBuckets", -1);

    return entity;
}
protected AtlasEntity storageDescriptor(String location, String inputFormat, String outputFormat, boolean compressed, List<AtlasEntity> columns) {
    AtlasEntity storageDescriptor = new AtlasEntity(STORAGE_DESC_TYPE);
    storageDescriptor.setAttribute("location", location);
    storageDescriptor.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, "qualified:" + location);
    storageDescriptor.setAttribute("inputFormat", inputFormat);
    storageDescriptor.setAttribute("outputFormat", outputFormat);
    storageDescriptor.setAttribute("compressed", compressed);
    storageDescriptor.setAttribute("cols", getAtlasObjectIds(columns));

    return storageDescriptor;
}
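// Illustrative usage only (not from the original source): composing the column() and
// storageDescriptor() helpers above; the location, formats, column names, and the "PII"
// classification are made up. Assumes java.util.Arrays is imported.
List<AtlasEntity> cols = Arrays.asList(
        column("id", "int", "surrogate key"),
        column("name", "string", "customer name", "PII"));

AtlasEntity sd = storageDescriptor("hdfs://namenode:8020/warehouse/customers",
        "org.apache.hadoop.mapred.TextInputFormat",
        "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat",
        false,
        cols);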