/**
 * Returns the names of all element groups defined in the underlying Gaffer schema.
 *
 * @return the set of group names from the Gaffer schema
 */
public Set<String> getGroups() {
    return gafferSchema.getGroups();
}
/**
 * Builds a Spark schema for each group in the Gaffer schema and caches the
 * results in {@code groupToSparkSchema}, keyed by group name.
 *
 * @throws SerialisationException if a group's Spark schema cannot be built
 */
private void buildSparkSchemas() throws SerialisationException {
    for (final String groupName : gafferSchema.getGroups()) {
        groupToSparkSchema.put(groupName, buildSparkSchema(groupName));
    }
    LOGGER.debug("Created Spark schema from Gaffer schema");
    LOGGER.debug("Spark schema is: {}", groupToSparkSchema);
}
/**
 * Builds a Parquet schema for each group in the Gaffer schema and caches the
 * results in {@code groupToParquetSchema}, keyed by group name.
 *
 * @throws SerialisationException if a group's Parquet schema cannot be built
 */
private void buildParquetSchema() throws SerialisationException {
    for (final String groupName : gafferSchema.getGroups()) {
        groupToParquetSchema.put(groupName, buildParquetSchema(groupName));
    }
}
/**
 * Configures the store's Accumulo table so that each element group in the
 * schema gets its own locality group, named after the group and containing
 * the single column family of the same name.
 *
 * @param store the Accumulo store whose backing table is updated
 * @throws StoreException if the locality groups cannot be applied to the table
 */
public static void setLocalityGroups(final AccumuloStore store) throws StoreException {
    final String tableName = store.getTableName();
    final Map<String, Set<Text>> localityGroups = new HashMap<>();
    for (final String group : store.getSchema().getGroups()) {
        // Each locality group holds exactly one column family: the group name itself.
        final Set<Text> columnFamilies = new HashSet<>();
        columnFamilies.add(new Text(group));
        localityGroups.put(group, columnFamilies);
    }
    LOGGER.info("Setting locality groups on table {}", tableName);
    try {
        store.getConnection().tableOperations().setLocalityGroups(tableName, localityGroups);
    } catch (final AccumuloException | AccumuloSecurityException | TableNotFoundException e) {
        throw new StoreException(e.getMessage(), e);
    }
}
/**
 * Creates a {@code GafferGroupObjectConverter} for each group in the Gaffer
 * schema and caches it in {@code groupToObjectConverter}, keyed by group name.
 */
private void buildConverters() {
    for (final String groupName : gafferSchema.getGroups()) {
        groupToObjectConverter.put(groupName,
                new GafferGroupObjectConverter(groupName,
                        getColumnToSerialiser(groupName),
                        getSerialisers(),
                        getColumnToPaths(groupName)));
    }
}
/**
 * Constructs the backing maps for a schema-driven map store.
 * <p>
 * For every group in the schema two maps are created via the {@code MapFactory}:
 * one for aggregated elements and one for non-aggregated element counts. If the
 * store properties request an index, multimaps from entity/edge ids to elements
 * are also created; otherwise those index fields are left {@code null}.
 *
 * @param schema             the Gaffer schema describing the element groups
 * @param mapStoreProperties the store properties (controls index creation and map factory)
 */
public MapImpl(final Schema schema, final MapStoreProperties mapStoreProperties) {
    this.schema = schema;
    // Binary operator used to aggregate element properties at ingest time.
    propertyAggregator = new AggregatorUtil.IngestPropertiesBinaryOperator(schema);
    mapFactory = createMapFactory(schema, mapStoreProperties);
    maintainIndex = mapStoreProperties.getCreateIndex();
    // One aggregated-element map and one non-aggregated element-count map per group;
    // map names are namespaced as "<group>|<suffix>".
    for (final String group : schema.getGroups()) {
        aggElements.put(group, mapFactory.getMap(group + "|" + AGG_ELEMENTS, Element.class, GroupedProperties.class));
        nonAggElements.put(group, mapFactory.getMap(group + "|" + NON_AGG_ELEMENTS, Element.class, Long.class));
    }
    if (maintainIndex) {
        // Index multimaps from element ids to the elements they appear in.
        entityIdToElements = mapFactory.getMultiMap(ENTITY_ID_TO_ELEMENTS, EntityId.class, Element.class);
        edgeIdToElements = mapFactory.getMultiMap(EDGE_ID_TO_ELEMENTS, EdgeId.class, Element.class);
    } else {
        // No index requested - callers must treat these fields as nullable.
        entityIdToElements = null;
        edgeIdToElements = null;
    }
    this.aggregatedGroups = schema.getAggregatedGroups();
    // Record the group-by properties for every entity and edge group.
    schema.getEntityGroups().forEach(this::addToGroupByMap);
    schema.getEdgeGroups().forEach(this::addToGroupByMap);
}
/**
 * Creates or re-initialises the underlying store from the given graph config,
 * filling in any missing graphId, schema or properties from the existing store.
 * <p>
 * If no store exists yet, one is created. Otherwise the store is re-initialised
 * when the config supplies a different graphId, any schema at all, or different
 * properties.
 *
 * @param config the graph configuration supplying graphId and graph library
 * @throws IllegalArgumentException if the store cannot be initialised with the
 *         resolved graphId, schema and properties
 */
private void updateStore(final GraphConfig config) {
    if (null == store) {
        // No store yet - create one from the supplied config, schema and properties.
        store = Store.createStore(config.getGraphId(), cloneSchema(schema), properties);
    } else if ((null != config.getGraphId() && !config.getGraphId().equals(store.getGraphId()))
            // NOTE(review): unlike the graphId/properties checks, any non-null schema
            // triggers re-initialisation even if identical to the store's schema -
            // presumably deliberate (schema equality may be unreliable); confirm.
            || (null != schema)
            || (null != properties && !properties.equals(store.getProperties()))) {
        // Fall back to the existing store's values for anything not supplied.
        if (null == config.getGraphId()) {
            config.setGraphId(store.getGraphId());
        }
        if (null == schema || schema.getGroups().isEmpty()) {
            schema = store.getSchema();
        }
        if (null == properties) {
            properties = store.getProperties();
        }
        try {
            store.initialise(config.getGraphId(), cloneSchema(schema), properties);
        } catch (final StoreException e) {
            throw new IllegalArgumentException("Unable to initialise the store with the given graphId, schema and properties", e);
        }
    }
    store.setGraphLibrary(config.getLibrary());
    // Re-check after initialisation: the store may hold the authoritative schema
    // (e.g. when this method took the no-reinitialise path above).
    if (null == schema || schema.getGroups().isEmpty()) {
        schema = store.getSchema();
    }
}
final Set<String> graphGroups = graph.getSchema().getGroups(); final Iterable<? extends Element> filteredInput = Iterables.filter( addElements.getInput(),