public void addElementsFromFile() { // --------------------------------------------------------- final AddElementsFromFile op = new AddElementsFromFile.Builder() .filename("filename") .generator(ElementGenerator.class) .parallelism(1) .validate(true) .skipInvalidElements(false) .build(); // --------------------------------------------------------- showExample(op, null); } }
public void addElementsFromKafka() { // --------------------------------------------------------- final AddElementsFromKafka op = new AddElementsFromKafka.Builder() .bootstrapServers("hostname1:8080,hostname2:8080") .groupId("groupId1") .topic("topic1") .generator(ElementGenerator.class) .parallelism(1) .validate(true) .skipInvalidElements(false) .build(); // --------------------------------------------------------- showExample(op, null); } }
public void addElementsFromSocket() { // --------------------------------------------------------- final AddElementsFromSocket op = new AddElementsFromSocket.Builder() .hostname("localhost") .port(8080) .delimiter("\n") .generator(ElementGenerator.class) .parallelism(1) .validate(true) .skipInvalidElements(false) .build(); // --------------------------------------------------------- showExample(op, null); } }
@Override
@SuppressWarnings("unchecked")
public void add(final String key, final Iterable<?> elements) throws OperationException {
    // Nothing to add - treat a null iterable as a no-op rather than an error.
    if (elements == null) {
        return;
    }

    final AddElements addElements = new AddElements.Builder()
            .input((Iterable<Element>) elements)
            .build();
    graph.execute(addElements, context.getUser());
}
printGraph(); final AddElements operation = new AddElements.Builder() .elements(new Entity.Builder() .group("entity") .vertex(6) .property("count", 1) .build()) .build(); printJava("new AddElements.Builder()\n" + " .elements(new Entity.Builder()\n"
store.runAsync(() -> { try { store.execute(new AddElements.Builder() .input(queue) .validate(validate) .skipInvalidElements(skipInvalid) .build(), new Context(new User())); restart = true;
@Override
public Void doOperation(final AddElements addElements, final Context context, final Store store) throws OperationException {
    // When validation is requested, wrap the input so each element is checked
    // (and optionally skipped) against the store schema as it is consumed.
    final Iterable<? extends Element> elements;
    if (addElements.isValidate()) {
        elements = new ValidatedElements(addElements.getInput(), store.getSchema(), addElements.isSkipInvalidElements());
    } else {
        elements = addElements.getInput();
    }

    addElements(elements, (MapStore) store);
    return null;
}
/**
 * Loads CSV records from the supplied reader, generates elements from them
 * and adds the elements to the graph.
 *
 * @param readerSupplier supplies a fresh {@link Reader} over the CSV data;
 *                       only opened when the operation chain iterates the input
 * @throws OperationException if the populate operation chain fails
 * @throws IOException       declared for callers; parser creation failures are
 *                           rethrown as {@link java.io.UncheckedIOException}
 */
public void load(final Supplier<Reader> readerSupplier) throws OperationException, IOException {
    // Lazily create the parser so the underlying Reader is only opened when iterated.
    final SuppliedIterable<CSVRecord> csvIterable = new SuppliedIterable<>(() -> {
        try {
            return new CSVParser(readerSupplier.get(), CSVFormat.DEFAULT.withFirstRecordAsHeader());
        } catch (final IOException e) {
            // UncheckedIOException (a RuntimeException subclass, so backward compatible
            // for existing catch blocks) keeps the IO nature of the failure visible
            // and preserves the cause.
            throw new java.io.UncheckedIOException("Unable to load csv data", e);
        }
    });

    try {
        final OperationChain<Void> populateChain = new OperationChain.Builder()
                .first(new GenerateElements.Builder<CSVRecord>()
                        .input(csvIterable)
                        .generator(new RoadTrafficCsvElementGenerator())
                        .build())
                .then(new AddElements.Builder()
                        .skipInvalidElements(false)
                        .build())
                .build();
        this.graph.execute(populateChain, this.user);
    } finally {
        // Always release the underlying Reader, even if the chain fails.
        CloseableUtil.close(csvIterable);
    }
}
@Override
public Object doOperation(final AddElementsFromSocket op, final Context context, final Store store) throws OperationException {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    if (op.getParallelism() != null) {
        env.setParallelism(op.getParallelism());
    }

    // Read newline(delimiter)-separated records from the socket and fan them
    // out into Gaffer Elements via the configured generator.
    final DataStream<Element> elementStream = env
            .socketTextStream(op.getHostname(), op.getPort(), op.getDelimiter())
            .flatMap(new GafferMapFunction(String.class, op.getElementGenerator()));

    final boolean skipRebalancing = Boolean.parseBoolean(op.getOption(FlinkConstants.SKIP_REBALANCING));
    if (skipRebalancing) {
        elementStream.addSink(new GafferSink(op, store));
    } else {
        elementStream.rebalance().addSink(new GafferSink(op, store));
    }

    try {
        env.execute(op.getClass().getSimpleName() + "-" + op.getHostname() + ":" + op.getPort());
    } catch (final Exception e) {
        throw new OperationException("Failed to add elements from port: " + op.getPort(), e);
    }

    return null;
}
}
@Override
public Object doOperation(final AddElementsFromKafka op, final Context context, final Store store) throws OperationException {
    final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    if (op.getParallelism() != null) {
        env.setParallelism(op.getParallelism());
    }

    // Consume records from the Kafka topic and fan them out into Gaffer Elements.
    final GafferMapFunction function = new GafferMapFunction(op.getConsumeAs(), op.getElementGenerator());
    final DataStream<Element> elementStream = env
            .addSource(new FlinkKafkaConsumer010<>(
                    op.getTopic(),
                    function.getSerialisationType(),
                    createFlinkProperties(op)))
            .flatMap(function);

    if (Boolean.parseBoolean(op.getOption(FlinkConstants.SKIP_REBALANCING))) {
        elementStream.addSink(new GafferSink(op, store));
    } else {
        elementStream.rebalance().addSink(new GafferSink(op, store));
    }

    try {
        env.execute(op.getClass().getSimpleName() + "-" + op.getGroupId() + "-" + op.getTopic());
    } catch (final Exception e) {
        throw new OperationException("Failed to add elements from Kafka topic: " + op.getTopic(), e);
    }

    return null;
}
@Override
public Object doOperation(final AddElementsFromFile op, final Context context, final Store store) throws OperationException {
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    if (op.getParallelism() != null) {
        env.setParallelism(op.getParallelism());
    }

    // Read the file line-by-line and fan the lines out into Gaffer Elements.
    final FlatMapOperator<String, Element> elements = env
            .readTextFile(op.getFilename())
            .flatMap(new GafferMapFunction(String.class, op.getElementGenerator()));

    if (Boolean.parseBoolean(op.getOption(FlinkConstants.SKIP_REBALANCING))) {
        elements.output(new GafferOutput(op, store));
    } else {
        elements.rebalance().output(new GafferOutput(op, store));
    }

    try {
        env.execute(op.getClass().getSimpleName() + "-" + op.getFilename());
    } catch (final Exception e) {
        throw new OperationException("Failed to add elements from file: " + op.getFilename(), e);
    }

    return null;
}
}
private Properties createFlinkProperties(final AddElementsFromKafka operation) {
    final Properties properties = new Properties();

    // Carry over any user-supplied options first so the mandatory Kafka
    // settings below always win.
    if (operation.getOptions() != null) {
        properties.putAll(operation.getOptions());
    }
    properties.put(FLINK_KAFKA_GROUP_ID, operation.getGroupId());
    properties.put(FLINK_KAFKA_BOOTSTRAP_SERVERS, StringUtils.join(operation.getBootstrapServers(), ","));

    // Internal Gaffer option - must not leak into the Kafka consumer config.
    properties.remove(FlinkConstants.SKIP_REBALANCING);
    return properties;
}
}
.input(data) .build()) .then(new AddElements()) .build();
resultCache.execute(new AddElements.Builder() .input(elements) .build(), context);
final AddElements addElements = new AddElements.Builder() .elements(elements) .build(); graph.execute(addElements, user);
private void addElements(final AddElements operation, final AccumuloStore store) throws OperationException {
    // Select the input up front: wrap it for schema validation when requested,
    // otherwise add the raw input. Only store.addElements can throw the
    // checked StoreException handled below.
    final Iterable<? extends Element> elements;
    if (operation.isValidate()) {
        elements = new ValidatedElements(operation.getInput(), store.getSchema(), operation.isSkipInvalidElements());
    } else {
        elements = operation.getInput();
    }

    try {
        store.addElements(elements);
    } catch (final StoreException e) {
        throw new OperationException("Failed to add elements", e);
    }
}
}
protected Graph createExampleGraph() { final Graph graph = new Graph.Builder() .addSchemas(StreamUtil.openStreams(getClass(), "/example/operation/schema")) .storeProperties(StreamUtil.openStream(getClass(), "/example/operation/mockaccumulostore.properties")) .build(); // Create data generator final DataGenerator dataGenerator = new DataGenerator(); // Load data into memory final List<String> data = DataUtils.loadData(StreamUtil.openStream(getClass(), "/example/operation/data.txt", true)); //add the edges to the graph using an operation chain consisting of: //generateElements - generating edges from the data (note these are directed edges) //addElements - add the edges to the graph final OperationChain addOpChain = new OperationChain.Builder() .first(new GenerateElements.Builder<String>() .generator(dataGenerator) .objects(data) .build()) .then(new AddElements()) .build(); try { graph.execute(addOpChain, new User()); } catch (OperationException e) { throw new RuntimeException(e); } return graph; }
printGraph(); final AddElements operation = new AddElements.Builder() .input(new Entity.Builder() .group("entity") .vertex(6) .property("count", 1) .build()) .build(); printJava("new AddElements.Builder()\n" + " .input(new Entity.Builder()\n"
final AddElements addElements = new AddElements.Builder() .elements(elements) .build(); graph.execute(addElements, user);
.input(dummyData) .build()) .then(new AddElements()) .build();