/**
 * Creates an {@code ObjectWritable} wrapping a {@code null} value.
 *
 * @param <A> the declared value type of the returned writable
 * @return a fresh ObjectWritable whose wrapped object is null
 */
public static <A> ObjectWritable<A> empty() {
    final ObjectWritable<A> nullHolder = new ObjectWritable<>(null);
    return nullHolder;
}
/**
 * Deserializes an {@link ObjectWritable} by delegating to Kryo's class-and-object reader.
 *
 * @param kryo  the shim around the active Kryo instance
 * @param input the shim around the Kryo input stream
 * @param clazz the declared target class (unused — the concrete class is encoded in the stream)
 * @return a new ObjectWritable wrapping the deserialized object
 */
@Override
@SuppressWarnings("unchecked") // readClassAndObject returns Object; the stream encodes the runtime type
public <I extends InputShim> ObjectWritable<T> read(final KryoShim<I, ?> kryo, final I input, final Class<ObjectWritable<T>> clazz) {
    // diamond form + scoped cast instead of the raw-type constructor, which suppressed all generic checking
    return new ObjectWritable<>((T) kryo.readClassAndObject(input));
}
}
@Override public <K, V> Iterator<KeyValue<K, V>> writeMemoryRDD(final Configuration configuration, final String memoryKey, JavaPairRDD<K, V> memoryRDD) { final org.apache.hadoop.conf.Configuration hadoopConfiguration = ConfUtil.makeHadoopConfiguration(configuration); final String outputLocation = hadoopConfiguration.get(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION); if (null != outputLocation) { // map back to a Hadoop stream for output memoryRDD.mapToPair(keyValue -> new Tuple2<>(new ObjectWritable<>(keyValue._1()), new ObjectWritable<>(keyValue._2()))) .saveAsNewAPIHadoopFile(Constants.getMemoryLocation(outputLocation, memoryKey), ObjectWritable.class, ObjectWritable.class, SequenceFileOutputFormat.class, hadoopConfiguration); try { return (Iterator) new ObjectWritableIterator(hadoopConfiguration, new Path(Constants.getMemoryLocation(outputLocation, memoryKey))); } catch (final IOException e) { throw new IllegalStateException(e.getMessage(), e); } } return Collections.emptyIterator(); } }
/**
 * Adds a value to the named memory key's accumulator.
 *
 * @param key   the memory key to add to
 * @param value the value to fold into the key's accumulator
 * @throws IllegalArgumentException if the key/value pair fails validation
 */
@Override
public void add(final String key, final Object value) {
    checkKeyValue(key, value);
    // guard clause: additions are only legal while the vertex program is executing
    if (!this.inExecute) {
        throw Memory.Exceptions.memoryAddOnlyDuringVertexProgramExecute(key);
    }
    this.sparkMemory.get(key).add(new ObjectWritable<>(value));
}
/**
 * Directly sets the named memory key's value.
 *
 * @param key   the memory key to set
 * @param value the value to store
 * @throws IllegalArgumentException if the key/value pair fails validation
 */
@Override
public void set(final String key, final Object value) {
    checkKeyValue(key, value);
    // guard clause: direct sets are forbidden while the vertex program is executing
    if (this.inExecute) {
        throw Memory.Exceptions.memorySetOnlyDuringVertexProgramSetUpAndTerminate(key);
    }
    this.sparkMemory.get(key).setValue(new ObjectWritable<>(value));
}
/**
 * Combines two accumulator operands with this memory key's reducer.
 * An empty operand acts as a pass-through: the other operand is returned unchanged.
 *
 * @param a the left operand
 * @param b the right operand
 * @return the combined writable, or the non-empty operand when the other is empty
 */
@Override
public ObjectWritable<A> addAccumulator(final ObjectWritable<A> a, final ObjectWritable<A> b) {
    if (a.isEmpty()) {
        return b;
    }
    if (b.isEmpty()) {
        return a;
    }
    final A reduced = this.memoryComputeKey.getReducer().apply(a.get(), b.get());
    return new ObjectWritable<>(reduced);
}
/**
 * Factory for an {@code ObjectWritable} holding a {@code null} payload.
 *
 * @param <A> the declared value type of the returned writable
 * @return an ObjectWritable wrapping null
 */
public static <A> ObjectWritable<A> empty() {
    return new ObjectWritable<A>(null);
}
/**
 * Deserializes an {@link ObjectWritable} by delegating to Kryo's class-and-object reader.
 *
 * @param kryo  the shim around the active Kryo instance
 * @param input the shim around the Kryo input stream
 * @param clazz the declared target class (unused — the concrete class is encoded in the stream)
 * @return a new ObjectWritable wrapping the deserialized object
 */
@Override
@SuppressWarnings("unchecked") // readClassAndObject returns Object; the stream encodes the runtime type
public <I extends InputShim> ObjectWritable<T> read(final KryoShim<I, ?> kryo, final I input, final Class<ObjectWritable<T>> clazz) {
    // diamond form + scoped cast instead of the raw-type constructor, which suppressed all generic checking
    return new ObjectWritable<>((T) kryo.readClassAndObject(input));
}
}
@Override public <K, V> Iterator<KeyValue<K, V>> writeMemoryRDD(final Configuration configuration, final String memoryKey, JavaPairRDD<K, V> memoryRDD) { final org.apache.hadoop.conf.Configuration hadoopConfiguration = ConfUtil.makeHadoopConfiguration(configuration); final String outputLocation = hadoopConfiguration.get(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION); if (null != outputLocation) { // map back to a Hadoop stream for output memoryRDD.mapToPair(keyValue -> new Tuple2<>(new ObjectWritable<>(keyValue._1()), new ObjectWritable<>(keyValue._2()))) .saveAsNewAPIHadoopFile(Constants.getMemoryLocation(outputLocation, memoryKey), ObjectWritable.class, ObjectWritable.class, SequenceFileOutputFormat.class, hadoopConfiguration); try { return (Iterator) new ObjectWritableIterator(hadoopConfiguration, new Path(Constants.getMemoryLocation(outputLocation, memoryKey))); } catch (final IOException e) { throw new IllegalStateException(e.getMessage(), e); } } return Collections.emptyIterator(); } }
/**
 * Adds a value to the named memory key's accumulator.
 *
 * @param key   the memory key to add to
 * @param value the value to fold into the key's accumulator
 * @throws IllegalArgumentException if the key/value pair fails validation
 */
@Override
public void add(final String key, final Object value) {
    checkKeyValue(key, value);
    // additions are only permitted during vertex program execution
    if (!this.inExecute) {
        throw Memory.Exceptions.memoryAddOnlyDuringVertexProgramExecute(key);
    }
    this.sparkMemory.get(key).add(new ObjectWritable<>(value));
}
/**
 * Adds a value to the named memory key's accumulator.
 *
 * @param key   the memory key to add to
 * @param value the value to fold into the key's accumulator
 * @throws IllegalArgumentException if the key/value pair fails validation
 */
@Override
public void add(final String key, final Object value) {
    checkKeyValue(key, value);
    // reject additions made outside of vertex program execution
    if (!this.inExecute) {
        throw Memory.Exceptions.memoryAddOnlyDuringVertexProgramExecute(key);
    }
    this.sparkMemory.get(key).add(new ObjectWritable<>(value));
}
/**
 * Directly sets the named memory key's value.
 *
 * @param key   the memory key to set
 * @param value the value to store
 * @throws IllegalArgumentException if the key/value pair fails validation
 */
@Override
public void set(final String key, final Object value) {
    checkKeyValue(key, value);
    // direct sets are only legal outside of vertex program execution
    if (this.inExecute) {
        throw Memory.Exceptions.memorySetOnlyDuringVertexProgramSetUpAndTerminate(key);
    }
    this.sparkMemory.get(key).setValue(new ObjectWritable<>(value));
}
/**
 * Directly sets the named memory key's value.
 *
 * @param key   the memory key to set
 * @param value the value to store
 * @throws IllegalArgumentException if the key/value pair fails validation
 */
@Override
public void set(final String key, final Object value) {
    checkKeyValue(key, value);
    // inverted branch: store when not executing, otherwise reject
    if (!this.inExecute) {
        this.sparkMemory.get(key).setValue(new ObjectWritable<>(value));
    } else {
        throw Memory.Exceptions.memorySetOnlyDuringVertexProgramSetUpAndTerminate(key);
    }
}
/**
 * Combines two accumulator operands using this memory key's reducer; when either
 * operand is empty the other is returned as-is.
 *
 * @param a the left operand
 * @param b the right operand
 * @return the reduced writable, or the non-empty operand when the other is empty
 */
@Override
public ObjectWritable<A> addAccumulator(final ObjectWritable<A> a, final ObjectWritable<A> b) {
    if (a.isEmpty())
        return b;
    else if (b.isEmpty())
        return a;
    else
        return new ObjectWritable<>(this.memoryComputeKey.getReducer().apply(a.get(), b.get()));
}