/**
 * Builds the job memory, seeding the per-iteration value maps and registering
 * every memory key declared by the vertex program (validated first) and by
 * each map-reduce job.
 *
 * @param vertexProgram source of memory compute keys; may be {@code null}
 * @param mapReducers   map-reduce jobs whose memory keys are also registered
 */
public FulgoraMemory(final VertexProgram<?> vertexProgram, final Set<MapReduce> mapReducers) {
    this.currentMap = new ConcurrentHashMap<>();
    this.previousMap = new ConcurrentHashMap<>();
    if (vertexProgram != null) {
        vertexProgram.getMemoryComputeKeys().forEach(key -> {
            MemoryHelper.validateKey(key); // throws if the key is invalid
            this.memoryKeys.add(key);
        });
    }
    mapReducers.forEach(mapReduce -> this.memoryKeys.add(mapReduce.getMemoryKey()));
}
/**
 * Builds the job memory. Keys declared by the vertex program are registered
 * as-is; each map-reduce job contributes a key created with
 * {@code Operator.assign} and both boolean flags of
 * {@link MemoryComputeKey#of} disabled.
 *
 * @param vertexProgram source of memory compute keys; may be {@code null}
 * @param mapReducers   map-reduce jobs whose memory keys are also registered
 */
public FulgoraMemory(final VertexProgram<?> vertexProgram, final Set<MapReduce> mapReducers) {
    this.currentMap = new ConcurrentHashMap<>();
    this.previousMap = new ConcurrentHashMap<>();
    if (vertexProgram != null) {
        vertexProgram.getMemoryComputeKeys().forEach(computeKey -> this.memoryKeys.put(computeKey.getKey(), computeKey));
    }
    mapReducers.forEach(mapReduce -> this.memoryKeys.put(
            mapReduce.getMemoryKey(),
            MemoryComputeKey.of(mapReduce.getMemoryKey(), Operator.assign, false, false)));
}
/**
 * Registers every memory compute key declared by the given vertex program,
 * keyed by its string name.
 *
 * @param vertexProgram the program whose memory compute keys are added
 */
public void addVertexProgramMemoryComputeKeys(final VertexProgram<?> vertexProgram) {
    for (final MemoryComputeKey computeKey : vertexProgram.getMemoryComputeKeys()) {
        this.memoryComputeKeys.put(computeKey.getKey(), computeKey);
    }
}
/**
 * Verifies that the vertex program can run on the given graph computer:
 * its memory keys must be non-null and non-empty, and every {@code requiresX()}
 * feature of the program must be matched by a true {@code supportsX()} on the
 * computer.
 *
 * @param computer      the graph computer to validate against
 * @param vertexProgram the vertex program being validated
 * @throws IllegalStateException if a required feature is unsupported or
 *                               reflection fails
 */
public static void validateProgramOnComputer(final GraphComputer computer, final VertexProgram vertexProgram) {
    if (vertexProgram.getMemoryComputeKeys().contains(null))
        throw Memory.Exceptions.memoryKeyCanNotBeNull();
    if (vertexProgram.getMemoryComputeKeys().contains(""))
        throw Memory.Exceptions.memoryKeyCanNotBeEmpty();
    final GraphComputer.Features computerFeatures = computer.features();
    final VertexProgram.Features programFeatures = vertexProgram.getFeatures();
    // Pair each requiresX() on the program with the matching supportsX() on the computer.
    for (final Method requirement : VertexProgram.Features.class.getMethods()) {
        if (!requirement.getName().startsWith("requires"))
            continue;
        final boolean supported;
        final boolean required;
        try {
            final String supportsName = requirement.getName().replace("requires", "supports");
            supported = (boolean) GraphComputer.Features.class.getMethod(supportsName).invoke(computerFeatures);
            required = (boolean) requirement.invoke(programFeatures);
        } catch (final Exception e) {
            throw new IllegalStateException("A reflection exception has occurred: " + e.getMessage(), e);
        }
        if (required && !supported)
            throw new IllegalStateException("The vertex program can not be executed on the graph computer: " + requirement.getName());
    }
}
/**
 * Builds the job memory, seeding the per-iteration value maps. Vertex-program
 * keys are registered directly; each map-reduce job contributes a key built
 * with {@code Operator.assign} and both optional flags of
 * {@link MemoryComputeKey#of} set to {@code false}.
 *
 * @param vertexProgram source of memory compute keys; may be {@code null}
 * @param mapReducers   map-reduce jobs whose memory keys are also registered
 */
public TinkerMemory(final VertexProgram<?> vertexProgram, final Set<MapReduce> mapReducers) {
    this.currentMap = new ConcurrentHashMap<>();
    this.previousMap = new ConcurrentHashMap<>();
    if (vertexProgram != null) {
        vertexProgram.getMemoryComputeKeys().forEach(computeKey -> this.memoryKeys.put(computeKey.getKey(), computeKey));
    }
    mapReducers.forEach(mapReduce -> this.memoryKeys.put(
            mapReduce.getMemoryKey(),
            MemoryComputeKey.of(mapReduce.getMemoryKey(), Operator.assign, false, false)));
}
/**
 * Builds the Spark-backed job memory. Memory compute keys are gathered from
 * the vertex program (if any) and from every map-reduce job (with
 * {@code Operator.assign} and both {@link MemoryComputeKey#of} flags
 * disabled); one Spark accumulator is then created per key, and an initially
 * empty broadcast map is published.
 *
 * @param vertexProgram source of memory compute keys; may be {@code null}
 * @param mapReducers   map-reduce jobs whose memory keys are also registered
 * @param sparkContext  context used to create accumulators and the broadcast
 */
public SparkMemory(final VertexProgram<?> vertexProgram, final Set<MapReduce> mapReducers, final JavaSparkContext sparkContext) {
    if (vertexProgram != null) {
        vertexProgram.getMemoryComputeKeys().forEach(computeKey -> this.memoryComputeKeys.put(computeKey.getKey(), computeKey));
    }
    mapReducers.forEach(mapReduce -> this.memoryComputeKeys.put(
            mapReduce.getMemoryKey(),
            MemoryComputeKey.of(mapReduce.getMemoryKey(), Operator.assign, false, false)));
    // One accumulator per key gathers worker-side memory updates.
    this.memoryComputeKeys.values().forEach(computeKey -> this.sparkMemory.put(
            computeKey.getKey(),
            sparkContext.accumulator(ObjectWritable.empty(), computeKey.getKey(), new MemoryAccumulator<>(computeKey))));
    this.broadcast = sparkContext.broadcast(Collections.emptyMap());
}
/**
 * Adds the memory compute keys declared by the given vertex program to this
 * memory's registry, indexed by key name.
 *
 * @param vertexProgram the program whose memory compute keys are added
 */
public void addVertexProgramMemoryComputeKeys(final VertexProgram<?> vertexProgram) {
    for (final MemoryComputeKey memoryComputeKey : vertexProgram.getMemoryComputeKeys()) {
        this.memoryComputeKeys.put(memoryComputeKey.getKey(), memoryComputeKey);
    }
}
/**
 * Builds the job memory. Initializes the per-iteration value maps, then
 * registers each vertex-program memory key (after validation) followed by the
 * memory key of every map-reduce job.
 *
 * @param vertexProgram source of memory compute keys; may be {@code null}
 * @param mapReducers   map-reduce jobs whose memory keys are also registered
 */
public FulgoraMemory(final VertexProgram<?> vertexProgram, final Set<MapReduce> mapReducers) {
    this.currentMap = new ConcurrentHashMap<>();
    this.previousMap = new ConcurrentHashMap<>();
    if (vertexProgram != null) {
        for (final String computeKey : vertexProgram.getMemoryComputeKeys()) {
            MemoryHelper.validateKey(computeKey); // throws if the key is invalid
            this.memoryKeys.add(computeKey);
        }
    }
    mapReducers.forEach(mapReduce -> this.memoryKeys.add(mapReduce.getMemoryKey()));
}
/**
 * Builds the job memory, registering the vertex program's memory keys
 * (validated first) and the memory key of every map-reduce job.
 *
 * @param vertexProgram source of memory compute keys; may be {@code null}
 * @param mapReducers   map-reduce jobs whose memory keys are also registered
 */
public DuctileMemory(VertexProgram<M> vertexProgram, Set<MapReduce<?, ?, ?, ?, R>> mapReducers) {
    if (vertexProgram != null) {
        vertexProgram.getMemoryComputeKeys().forEach(key -> {
            MemoryHelper.validateKey(key); // throws if the key is invalid
            memoryKeys.add(key);
        });
    }
    mapReducers.forEach(mapReduce -> memoryKeys.add(mapReduce.getMemoryKey()));
}
/**
 * Checks that the given vertex program is executable on the given graph
 * computer. Memory keys may not be null or empty, and for every
 * {@code requiresX()} feature the program declares, the computer's
 * corresponding {@code supportsX()} must return {@code true}.
 *
 * @param computer      the graph computer to validate against
 * @param vertexProgram the vertex program being validated
 * @throws IllegalStateException if a required feature is unsupported or a
 *                               reflection call fails
 */
public static void validateProgramOnComputer(final GraphComputer computer, final VertexProgram vertexProgram) {
    if (vertexProgram.getMemoryComputeKeys().contains(null))
        throw Memory.Exceptions.memoryKeyCanNotBeNull();
    if (vertexProgram.getMemoryComputeKeys().contains(""))
        throw Memory.Exceptions.memoryKeyCanNotBeEmpty();
    final GraphComputer.Features featuresOfComputer = computer.features();
    final VertexProgram.Features featuresOfProgram = vertexProgram.getFeatures();
    for (final Method requiresMethod : VertexProgram.Features.class.getMethods()) {
        final String name = requiresMethod.getName();
        if (!name.startsWith("requires"))
            continue;
        final boolean computerSupports;
        final boolean programRequires;
        try {
            // requiresX() on the program maps to supportsX() on the computer.
            computerSupports = (boolean) GraphComputer.Features.class
                    .getMethod(name.replace("requires", "supports"))
                    .invoke(featuresOfComputer);
            programRequires = (boolean) requiresMethod.invoke(featuresOfProgram);
        } catch (final Exception e) {
            throw new IllegalStateException("A reflection exception has occurred: " + e.getMessage(), e);
        }
        if (programRequires && !computerSupports)
            throw new IllegalStateException("The vertex program can not be executed on the graph computer: " + name);
    }
}
/**
 * Builds the job memory. Seeds the current/previous value maps, registers the
 * vertex program's memory compute keys, and adds one key per map-reduce job
 * using {@code Operator.assign} with both {@link MemoryComputeKey#of} flags
 * set to {@code false}.
 *
 * @param vertexProgram source of memory compute keys; may be {@code null}
 * @param mapReducers   map-reduce jobs whose memory keys are also registered
 */
public TinkerMemory(final VertexProgram<?> vertexProgram, final Set<MapReduce> mapReducers) {
    this.currentMap = new ConcurrentHashMap<>();
    this.previousMap = new ConcurrentHashMap<>();
    if (vertexProgram != null) {
        vertexProgram.getMemoryComputeKeys().forEach(key -> this.memoryKeys.put(key.getKey(), key));
    }
    mapReducers.forEach(mapReduce -> this.memoryKeys.put(
            mapReduce.getMemoryKey(),
            MemoryComputeKey.of(mapReduce.getMemoryKey(), Operator.assign, false, false)));
}
public TinkerMemory(final VertexProgram<?> vertexProgram, final Set<MapReduce> mapReducers) { this.currentMap = new ConcurrentHashMap<>(); this.previousMap = new ConcurrentHashMap<>(); if (null != vertexProgram) { for (final MemoryComputeKey memoryComputeKey : vertexProgram.getMemoryComputeKeys()) { this.memoryKeys.put(memoryComputeKey.getKey(), memoryComputeKey); } } for (final MapReduce mapReduce : mapReducers) { this.memoryKeys.put(mapReduce.getMemoryKey(), MemoryComputeKey.of(mapReduce.getMemoryKey(), Operator.assign, false, false)); } }
public SparkMemory(final VertexProgram<?> vertexProgram, final Set<MapReduce> mapReducers, final JavaSparkContext sparkContext) { if (null != vertexProgram) { for (final MemoryComputeKey key : vertexProgram.getMemoryComputeKeys()) { this.memoryComputeKeys.put(key.getKey(), key); } } for (final MapReduce mapReduce : mapReducers) { this.memoryComputeKeys.put(mapReduce.getMemoryKey(), MemoryComputeKey.of(mapReduce.getMemoryKey(), Operator.assign, false, false)); } for (final MemoryComputeKey memoryComputeKey : this.memoryComputeKeys.values()) { this.sparkMemory.put( memoryComputeKey.getKey(), sparkContext.accumulator(ObjectWritable.empty(), memoryComputeKey.getKey(), new MemoryAccumulator<>(memoryComputeKey))); } this.broadcast = sparkContext.broadcast(Collections.emptyMap()); }
/**
 * Builds the Spark-backed job memory for Grakn. Gathers memory compute keys
 * from the vertex program and from every map-reduce job
 * ({@code Operator.assign}, both {@link MemoryComputeKey#of} flags disabled),
 * creates one Spark accumulator per key, and publishes an initially empty
 * broadcast map.
 *
 * @param vertexProgram source of memory compute keys; may be {@code null}
 * @param mapReducers   map-reduce jobs whose memory keys are also registered
 * @param sparkContext  context used to create accumulators and the broadcast
 */
public GraknSparkMemory(final VertexProgram<?> vertexProgram, final Set<MapReduce> mapReducers, final JavaSparkContext sparkContext) {
    if (vertexProgram != null) {
        vertexProgram.getMemoryComputeKeys().forEach(computeKey -> this.memoryComputeKeys.put(computeKey.getKey(), computeKey));
    }
    mapReducers.forEach(mapReduce -> this.memoryComputeKeys.put(
            mapReduce.getMemoryKey(),
            MemoryComputeKey.of(mapReduce.getMemoryKey(), Operator.assign, false, false)));
    // Wire one accumulator per registered key for worker-side updates.
    this.memoryComputeKeys.values().forEach(computeKey -> this.sparkMemory.put(
            computeKey.getKey(),
            sparkContext.accumulator(ObjectWritable.empty(), computeKey.getKey(), new MemoryAccumulator<>(computeKey))));
    this.broadcast = sparkContext.broadcast(Collections.emptyMap());
}