/**
 * Returns the deserialised {@link Schema}, resolving it lazily on first access.
 * If a graph is set, its schema wins; otherwise the JSON bytes are deserialised.
 * Not serialised to JSON (hence {@code @JsonIgnore}).
 *
 * @return the schema, or {@code null} if neither a graph nor schema bytes are set
 */
@JsonIgnore
public Schema getDeserialisedSchema() {
    if (null == deserialisedSchema) {
        if (null != graph) {
            deserialisedSchema = graph.getSchema();
        } else if (null != schema) {
            deserialisedSchema = Schema.fromJson(schema);
        }
    }
    return deserialisedSchema;
}
/**
 * Builds a {@link View} covering every entity and edge group declared in the schema.
 *
 * @param schema the schema supplying the group names
 * @return a view selecting all entity and edge groups
 */
private static View createView(final Schema schema) {
    final View.Builder viewBuilder = new View.Builder();
    viewBuilder.entities(schema.getEntityGroups());
    viewBuilder.edges(schema.getEdgeGroups());
    return viewBuilder.build();
}
}
/**
 * Aggregates two elements of the same group using the group's ingest aggregator.
 * The schema is rebuilt lazily from its JSON form — presumably because
 * {@code Schema} is not serialisable across Spark executors (confirm).
 *
 * @param v1 the first element
 * @param v2 the second element (its group selects the aggregator)
 * @return the aggregated element
 */
@Override
public Element call(final Element v1, final Element v2) {
    if (null == gafferSchema) {
        gafferSchema = Schema.fromJson(jsonGafferSchema);
    }
    final ElementAggregator ingestAggregator =
            gafferSchema.getElement(v2.getGroup()).getIngestAggregator();
    return ingestAggregator.apply(v1, v2);
}
}
/**
 * Finds every entity and edge group in the schema that declares the given property.
 *
 * @param property the property name to look for
 * @return the names of all groups containing the property
 */
private Set<String> getGroupsThatHaveProperty(final String property) {
    final Set<String> matchingGroups = new HashSet<>();
    for (final String group : schema.getEntityGroups()) {
        if (schema.getEntity(group).getProperties().contains(property)) {
            matchingGroups.add(group);
        }
    }
    for (final String group : schema.getEdgeGroups()) {
        if (schema.getEdge(group).getProperties().contains(property)) {
            matchingGroups.add(group);
        }
    }
    return matchingGroups;
}
@Override public Tuple2<List<Object>, Element> call(final Element element) throws Exception { if (null == gafferSchema) { gafferSchema = Schema.fromJson(jsonGafferSchema); } final String group = element.getGroup(); final List<Object> list = new ArrayList<>(); list.add(group); if (gafferSchema.getEntityGroups().contains(group)) { final Entity entity = (Entity) element; list.add(entity.getVertex()); } else { final Edge edge = (Edge) element; list.add(edge.getSource()); list.add(edge.getDestination()); list.add(edge.getDirectedType()); } for (final String property : gafferSchema.getElement(group).getGroupBy()) { list.add(element.getProperty(property)); } return new Tuple2<>(list, element); } }
/**
 * Creates the map-backed element store for the given schema.
 * Sets up per-group aggregated and non-aggregated element maps, optional
 * entity/edge-id indexes, and the group-by property lookups.
 * NOTE: initialisation order matters — {@code aggregatedGroups} must be assigned
 * before {@code addToGroupByMap} is invoked, since that method reads it.
 *
 * @param schema             the Gaffer schema defining the groups
 * @param mapStoreProperties store configuration (map factory, index creation flag)
 */
public MapImpl(final Schema schema, final MapStoreProperties mapStoreProperties) { this.schema = schema; propertyAggregator = new AggregatorUtil.IngestPropertiesBinaryOperator(schema); mapFactory = createMapFactory(schema, mapStoreProperties); maintainIndex = mapStoreProperties.getCreateIndex(); for (final String group : schema.getGroups()) { aggElements.put(group, mapFactory.getMap(group + "|" + AGG_ELEMENTS, Element.class, GroupedProperties.class)); nonAggElements.put(group, mapFactory.getMap(group + "|" + NON_AGG_ELEMENTS, Element.class, Long.class)); } if (maintainIndex) { entityIdToElements = mapFactory.getMultiMap(ENTITY_ID_TO_ELEMENTS, EntityId.class, Element.class); edgeIdToElements = mapFactory.getMultiMap(EDGE_ID_TO_ELEMENTS, EdgeId.class, Element.class); } else { entityIdToElements = null; edgeIdToElements = null; } this.aggregatedGroups = schema.getAggregatedGroups(); schema.getEntityGroups().forEach(this::addToGroupByMap); schema.getEdgeGroups().forEach(this::addToGroupByMap); }
/**
 * Looks up the element definition for a group, failing fast with a descriptive
 * exception when the group is not in the schema.
 *
 * @param group the group name to resolve
 * @return the element definition for the group (never {@code null})
 * @throws AccumuloElementConversionException if the group is unknown
 */
private SchemaElementDefinition getSchemaElementDefinition(final String group) {
    final SchemaElementDefinition definition = schema.getElement(group);
    if (null != definition) {
        return definition;
    }
    throw new AccumuloElementConversionException("No SchemaElementDefinition found for group " + group + ", is this group in your schema or do your table iterators need updating?");
}
// NOTE(review): this method appears truncated/garbled — the entity branch is missing its
// opening "if (isEntity) {" guard, the edge branch mixes a thrown SchemaException with
// unreachable-looking statements, and the closing braces/return are absent. Left
// byte-identical; recover the complete method from version control before editing.
private MessageType buildParquetSchema(final String group) throws SerialisationException { SchemaElementDefinition groupGafferSchema; final boolean isEntity = gafferSchema.getEntityGroups().contains(group); final StringBuilder schemaString = new StringBuilder("message Element {\n"); Serialiser serialiser = gafferSchema.getVertexSerialiser(); groupGafferSchema = gafferSchema.getEntity(group); schemaString.append(convertColumnSerialiserToParquetColumns(serialiser, ParquetStoreConstants.VERTEX)).append("\n"); addGroupColumnToSerialiser(group, ParquetStoreConstants.VERTEX, serialiser); } else { groupGafferSchema = gafferSchema.getEdge(group); throw new SchemaException("The ParquetStore does not support properties which contain the characters '_' or '.'"); TypeDefinition type = gafferSchema.getType(entry.getValue()); addGroupColumnToSerialiser(group, entry.getKey(), type.getSerialiserClass()); schemaString.append(convertColumnSerialiserToParquetColumns(getSerialiser(type.getSerialiserClass()), entry.getKey())).append("\n");
/**
 * @return the names of all groups (entities and edges) in the Gaffer schema
 */
public Set<String> getGroups() {
    final Set<String> allGroups = gafferSchema.getGroups();
    return allGroups;
}
// NOTE(review): this method appears truncated — braces are unbalanced, the entity-group
// loop header and the final return statements are missing, and `entityGroup` is used
// without a visible declaration. Left byte-identical; recover the complete method from
// version control before editing.
public boolean queryTimeAggregatorRequired(final View view, final AccumuloStore store) { Schema schema = store.getSchema(); if (!schema.isAggregationEnabled()) { return false; String visibilityProp = schema.getVisibilityProperty(); for (final String edgeGroup : view.getEdgeGroups()) { SchemaEdgeDefinition edgeDefinition = schema.getEdge(edgeGroup); if (null != edgeDefinition) { if (edgeDefinition.containsProperty(visibilityProp)) { SchemaEntityDefinition entityDefinition = schema.getEntity(entityGroup); if (null != entityDefinition) { if (entityDefinition.containsProperty(visibilityProp)) {
/**
 * Captures the schema as compact JSON — presumably so this function can be shipped
 * to Spark executors and the non-serialisable {@code Schema} rebuilt there (confirm).
 *
 * @param gafferSchema the schema whose ingest aggregators will be used
 */
public AggregateGafferElements(final Schema gafferSchema) { jsonGafferSchema = gafferSchema.toCompactJson(); }
/**
 * Lazily deserialises the requested property, decoding only the part of the
 * Accumulo key/value that can contain it (column qualifier for group-by
 * properties, visibility, timestamp, or the value otherwise). Everything
 * deserialised alongside it is cached into {@code lazyProperties}.
 *
 * @param name           the property to fetch
 * @param lazyProperties cache populated with all properties decoded in this call
 * @return the property value, or {@code null} if absent
 * @throws IllegalArgumentException if the group is not in the schema
 */
@Override
public Object getProperty(final String name, final Properties lazyProperties) {
    // Resolve and cache the element definition for this group on first access.
    if (null == eDef) {
        eDef = schema.getElement(group);
        if (null == eDef) {
            throw new IllegalArgumentException("Element definition for " + group + " could not be found in the schema");
        }
    }
    // Branch order matters: group-by check first, then visibility, then timestamp.
    final Properties decoded;
    if (eDef.getGroupBy().contains(name)) {
        decoded = elementConverter.getPropertiesFromColumnQualifier(group, key.getColumnQualifierData().getBackingArray());
    } else if (name.equals(schema.getVisibilityProperty())) {
        decoded = elementConverter.getPropertiesFromColumnVisibility(group, key.getColumnVisibilityData().getBackingArray());
    } else if (name.equals(timestampProperty)) {
        decoded = elementConverter.getPropertiesFromTimestamp(group, key.getTimestamp());
    } else {
        decoded = elementConverter.getPropertiesFromValue(group, value);
    }
    lazyProperties.putAll(decoded);
    return decoded.get(name);
}
}
// NOTE(review): this method appears truncated — the entity branch breaks off inside
// "if (null == vertexType) {" and jumps straight into the edge branch, and the closing
// braces are missing. Left byte-identical; recover the complete method from version
// control before editing.
private void buildSchema() { LOGGER.info("Building Spark SQL schema for groups {}", StringUtils.join(groups, ',')); for (final String group : groups) { final SchemaElementDefinition elementDefn = schema.getElement(group); final List<StructField> structFieldList = new ArrayList<>(); if (elementDefn instanceof SchemaEntityDefinition) { entityOrEdgeByGroup.put(group, EntityOrEdge.ENTITY); final SchemaEntityDefinition entityDefinition = (SchemaEntityDefinition) elementDefn; final String vertexClass = schema.getType(entityDefinition.getVertex()).getFullClassString(); final DataType vertexType = getType(vertexClass); if (null == vertexType) { entityOrEdgeByGroup.put(group, EntityOrEdge.EDGE); final SchemaEdgeDefinition edgeDefinition = (SchemaEdgeDefinition) elementDefn; final String srcClass = schema.getType(edgeDefinition.getSource()).getFullClassString(); final String dstClass = schema.getType(edgeDefinition.getDestination()).getFullClassString(); final DataType srcType = getType(srcClass); final DataType dstType = getType(dstClass);
/**
 * @return the names of all edge groups in the Gaffer schema
 */
public Set<String> getEdgeGroups() {
    final Set<String> edgeGroups = gafferSchema.getEdgeGroups();
    return edgeGroups;
}
/**
 * @return the names of all entity groups in the Gaffer schema
 */
public Set<String> getEntityGroups() {
    final Set<String> entityGroups = gafferSchema.getEntityGroups();
    return entityGroups;
}
public RetrieveElementsFromFile(final Path filePath, final FilterPredicate filter, final Schema gafferSchema, final ConcurrentLinkedQueue<Element> queue, final boolean needsValidatorsAndFiltersApplying, final boolean skipValidation, final View view, final User user) { this.filePath = filePath; this.filter = filter; this.jsonGafferSchema = gafferSchema.toCompactJson(); this.gafferSchema = gafferSchema; if (gafferSchema.getVisibilityProperty() != null) { this.visibility = gafferSchema.getVisibilityProperty(); } else { this.visibility = new String(); } if (user != null && user.getDataAuths() != null) { final Set<String> dataAuths = user.getDataAuths(); this.auths = new Authorisations(dataAuths.toArray(new String[dataAuths.size()])); } else { this.auths = new Authorisations(); } this.queue = queue; this.view = view; this.needsValidatorsAndFiltersApplying = needsValidatorsAndFiltersApplying; this.skipValidation = skipValidation; if (filePath.getName().contains("=")) { group = filePath.getName().split("=")[1]; } else { group = filePath.getParent().getName().split("=")[1]; } elementDefinitionJson = view.getElement(group).toCompactJson(); }
/**
 * Verifies that a graph matches the copy held in the federated store cache,
 * comparing store properties, then schema JSON, then graph id — throwing on
 * the first mismatch found.
 *
 * @param newGraph the graph being added
 * @param graphId  the id used to look up the cached copy
 * @throws RuntimeException if properties, schema, or graph id differ from the cache
 */
private void validateSameAsFromCache(final Graph newGraph, final String graphId) {
    final Graph cached = federatedStoreCache.getGraphSerialisableFromCache(graphId).getGraph(graphLibrary);
    if (!newGraph.getStoreProperties().getProperties().equals(cached.getStoreProperties().getProperties())) {
        throw new RuntimeException(String.format(ERROR_ADDING_GRAPH_TO_CACHE, GraphConfigEnum.PROPERTIES.toString(), graphId));
    }
    if (!JsonUtil.equals(newGraph.getSchema().toJson(false), cached.getSchema().toJson(false))) {
        throw new RuntimeException(String.format(ERROR_ADDING_GRAPH_TO_CACHE, GraphConfigEnum.SCHEMA.toString(), graphId));
    }
    if (!newGraph.getGraphId().equals(cached.getGraphId())) {
        throw new RuntimeException(String.format(ERROR_ADDING_GRAPH_TO_CACHE, "GraphId", graphId));
    }
}
/**
 * Records the group-by and non-group-by property names for a group, and notes
 * the group as non-aggregating when it is not in {@code aggregatedGroups}.
 *
 * @param group the group to index
 */
private void addToGroupByMap(final String group) {
    final SchemaElementDefinition elementDef = schema.getElement(group);
    groupToGroupByProperties.put(group, elementDef.getGroupBy());
    if (!aggregatedGroups.contains(group)) {
        groupsWithNoAggregation.add(group);
    }
    // Non-group-by properties = all properties minus the group-by ones.
    final Set<String> nonGroupByProperties = new HashSet<>(elementDef.getProperties());
    nonGroupByProperties.removeAll(elementDef.getGroupBy());
    groupToNonGroupByProperties.put(group, nonGroupByProperties);
}
}
/**
 * Builds and caches a Spark schema for every group in the Gaffer schema.
 *
 * @throws SerialisationException if building any group's Spark schema fails
 */
private void buildSparkSchemas() throws SerialisationException {
    for (final String groupName : gafferSchema.getGroups()) {
        groupToSparkSchema.put(groupName, buildSparkSchema(groupName));
    }
    LOGGER.debug("Created Spark schema from Gaffer schema");
    LOGGER.debug("Spark schema is: {}", groupToSparkSchema);
}
/**
 * Captures the schema as compact JSON — presumably so this function can be shipped
 * to Spark executors and the non-serialisable {@code Schema} rebuilt there (confirm).
 *
 * @param gafferSchema the schema whose group-by definitions drive key extraction
 */
public ExtractKeyFromElements(final Schema gafferSchema) { jsonGafferSchema = gafferSchema.toCompactJson(); }