/**
 * Registers a rebalance of the parent data set under the new set ID.
 *
 * @param info operation descriptor carrying parent/target set IDs and the parallelism
 */
private void createRebalanceOperation(PythonOperationInfo info) {
    DataSet<?> parent = sets.getDataSet(info.parentID);
    // Round-robin repartitioning; parallelism comes from the operation descriptor.
    DataSet<?> rebalanced = parent.rebalance()
        .setParallelism(info.parallelism)
        .name("Rebalance");
    sets.add(info.setID, rebalanced);
}
@Override public Graph<LongValue, NullValue, NullValue> generate() { int scale = Long.SIZE - Long.numberOfLeadingZeros(vertexCount - 1); // Edges int cyclesPerEdge = noiseEnabled ? 5 * scale : scale; List<BlockInfo<T>> generatorBlocks = randomGenerableFactory .getRandomGenerables(edgeCount, cyclesPerEdge); DataSet<Edge<LongValue, NullValue>> edges = env .fromCollection(generatorBlocks) .name("Random generators") .rebalance() .setParallelism(parallelism) .name("Rebalance") .flatMap(new GenerateEdges<>(vertexCount, scale, a, b, c, noiseEnabled, noise)) .setParallelism(parallelism) .name("RMat graph edges"); // Vertices DataSet<Vertex<LongValue, NullValue>> vertices = GraphGeneratorUtils.vertexSet(edges, parallelism); // Graph return Graph.fromDataSet(vertices, edges, env); }
.rebalance() .setParallelism(parallelism) .name("Rebalance") .flatMap(new GenerateGroups<>()) .setParallelism(parallelism)
.rebalance() .setParallelism(parallelism) .name("Rebalance") .flatMap(new GenerateGroups<>()) .setParallelism(parallelism)
/**
 * Registers a rebalance of the parent data set under the new set ID.
 *
 * @param info operation descriptor carrying parent and target set IDs
 * @throws IOException declared for interface compatibility with sibling operations
 */
private void createRebalanceOperation(OperationInfo info) throws IOException {
    // Wildcard type instead of the raw DataSet type: avoids an unchecked
    // raw-type warning and matches the typed variant of this operation.
    DataSet<?> op = (DataSet<?>) sets.get(info.parentID);
    sets.put(info.setID, op.rebalance().name("Rebalance"));
}
@Override public Graph<LongValue, NullValue, NullValue> generate() { int scale = Long.SIZE - Long.numberOfLeadingZeros(vertexCount - 1); // Edges int cyclesPerEdge = noiseEnabled ? 5 * scale : scale; List<BlockInfo<T>> generatorBlocks = randomGenerableFactory .getRandomGenerables(edgeCount, cyclesPerEdge); DataSet<Edge<LongValue, NullValue>> edges = env .fromCollection(generatorBlocks) .name("Random generators") .rebalance() .setParallelism(parallelism) .name("Rebalance") .flatMap(new GenerateEdges<>(vertexCount, scale, a, b, c, noiseEnabled, noise)) .setParallelism(parallelism) .name("RMat graph edges"); // Vertices DataSet<Vertex<LongValue, NullValue>> vertices = GraphGeneratorUtils.vertexSet(edges, parallelism); // Graph return Graph.fromDataSet(vertices, edges, env); }
@Override public Graph<LongValue, NullValue, NullValue> generate() { int scale = Long.SIZE - Long.numberOfLeadingZeros(vertexCount - 1); // Edges int cyclesPerEdge = noiseEnabled ? 5 * scale : scale; List<BlockInfo<T>> generatorBlocks = randomGenerableFactory .getRandomGenerables(edgeCount, cyclesPerEdge); DataSet<Edge<LongValue, NullValue>> edges = env .fromCollection(generatorBlocks) .name("Random generators") .rebalance() .setParallelism(parallelism) .name("Rebalance") .flatMap(new GenerateEdges<T>(vertexCount, scale, A, B, C, noiseEnabled, noise)) .setParallelism(parallelism) .name("RMat graph edges"); // Vertices DataSet<Vertex<LongValue, NullValue>> vertices = GraphGeneratorUtils.vertexSet(edges, parallelism); // Graph return Graph.fromDataSet(vertices, edges, env); }
@Override public Graph<LongValue, NullValue, NullValue> generate() { int scale = Long.SIZE - Long.numberOfLeadingZeros(vertexCount - 1); // Edges int cyclesPerEdge = noiseEnabled ? 5 * scale : scale; List<BlockInfo<T>> generatorBlocks = randomGenerableFactory .getRandomGenerables(edgeCount, cyclesPerEdge); DataSet<Edge<LongValue, NullValue>> edges = env .fromCollection(generatorBlocks) .name("Random generators") .rebalance() .setParallelism(parallelism) .name("Rebalance") .flatMap(new GenerateEdges<>(vertexCount, scale, a, b, c, noiseEnabled, noise)) .setParallelism(parallelism) .name("RMat graph edges"); // Vertices DataSet<Vertex<LongValue, NullValue>> vertices = GraphGeneratorUtils.vertexSet(edges, parallelism); // Graph return Graph.fromDataSet(vertices, edges, env); }
/**
 * Creates the initial working set from the edge candidates.
 *
 * @return initial working set with the expand embeddings
 */
private DataSet<ExpandEmbedding> preProcess() {
    // For incoming expansion, flip each candidate edge so the join key lines up.
    if (direction == ExpandDirection.IN) {
        candidateEdges = candidateEdges
            .map(new ReverseEdgeEmbedding())
            .name(getName() + " - Reverse Edges");
    }

    // Key the candidate edges and co-locate them by hash of the join key.
    this.candidateEdgeTuples = candidateEdges
        .map(new ExtractKeyedCandidateEdges())
        .name(getName() + " - Create candidate edge tuples")
        .partitionByHash(0)
        .name(getName() + " - Partition edge tuples");

    CreateExpandEmbedding expandFunction =
        new CreateExpandEmbedding(distinctVertexColumns, distinctEdgeColumns, closingColumn);

    return input
        .join(candidateEdgeTuples, joinHint)
        .where(new ExtractExpandColumn(expandColumn))
        .equalTo(0)
        .with(expandFunction)
        .name(getName() + " - Initial expansion");
}
/**
 * Creates the initial working set from the edge candidates.
 *
 * @return initial working set with the expand embeddings
 */
private DataSet<ExpandEmbedding> preProcess() {
    boolean expandIncoming = direction == ExpandDirection.IN;
    if (expandIncoming) {
        // Reverse edges so incoming expansion joins on the correct endpoint.
        candidateEdges = candidateEdges
            .map(new ReverseEdgeEmbedding())
            .name(getName() + " - Reverse Edges");
    }

    // Extract a join key per candidate edge, then hash-partition on it.
    this.candidateEdgeTuples = candidateEdges
        .map(new ExtractKeyedCandidateEdges())
        .name(getName() + " - Create candidate edge tuples")
        .partitionByHash(0)
        .name(getName() + " - Partition edge tuples");

    // Join embeddings against the keyed edges to seed the working set.
    return input.join(candidateEdgeTuples, joinHint)
        .where(new ExtractExpandColumn(expandColumn))
        .equalTo(0)
        .with(new CreateExpandEmbedding(
            distinctVertexColumns,
            distinctEdgeColumns,
            closingColumn))
        .name(getName() + " - Initial expansion");
}
.rebalance() .setParallelism(parallelism) .name("Rebalance") .flatMap(new GenerateGroups<>()) .setParallelism(parallelism)
.rebalance() .setParallelism(littleParallelism) .name("Rebalance") .flatMap(new GenerateGroups<K>()) .setParallelism(littleParallelism)
.rebalance() .setParallelism(parallelism) .name("Rebalance") .flatMap(new GenerateGroups<>()) .setParallelism(parallelism)
.partitionByHash(groupKeys) .setParallelism(dop) .name("reduce-" + node.getID());
.rebalance() .setParallelism(littleParallelism) .name("Rebalance") .flatMap(new GenerateGroups<K>()) .setParallelism(littleParallelism)
.rebalance() .setParallelism(parallelism) .name("Rebalance") .flatMap(new GenerateGroups<>()) .setParallelism(parallelism)
.rebalance() .setParallelism(parallelism) .name("Rebalance") .flatMap(new GenerateGroups<>()) .setParallelism(parallelism)