/**
 * Combines two partial memory values. An empty operand is the identity: the
 * other side is returned untouched. When both carry a value, the key's
 * reducer merges them into a fresh {@link ObjectWritable}.
 */
@Override
public ObjectWritable<A> addAccumulator(final ObjectWritable<A> a, final ObjectWritable<A> b) {
    // Short-circuit on the identity element from either side.
    if (a.isEmpty())
        return b;
    else if (b.isEmpty())
        return a;
    else
        return new ObjectWritable<>(this.memoryComputeKey.getReducer().apply(a.get(), b.get()));
}
/**
 * The identity element for accumulation: an empty {@link ObjectWritable},
 * regardless of the value passed in.
 */
@Override
public ObjectWritable<A> zero(final ObjectWritable<A> a) {
    return ObjectWritable.empty();
}
}
// Java serialization hook: delegate to the Writable write() method so Java
// serialization and Hadoop Writable serialization share one code path.
private void writeObject(final ObjectOutputStream outputStream) throws IOException { this.write(outputStream); }
@Override public int compare(byte[] bytes, int i, int i1, byte[] bytes1, int i2, int i3) { try { this.objectWritable1.readFields(new DataInputStream(new ByteArrayInputStream(bytes, i, i1))); this.objectWritable2.readFields(new DataInputStream(new ByteArrayInputStream(bytes1, i2, i3))); //System.out.println(objectWritable1 + "<=>" + objectWritable2 + ":::" + this.comparator.compare(objectWritable1.get(), objectWritable2.get())); return this.comparator.compare(this.objectWritable1.get(), this.objectWritable2.get()); } catch (final Exception e) { LOGGER.error(e.getMessage()); throw new IllegalStateException(e.getMessage()); } }
/**
 * Static factory for an {@code ObjectWritable} that wraps no value
 * (internally, a {@code null} payload).
 */
public static <A> ObjectWritable<A> empty() {
    final ObjectWritable<A> emptyWritable = new ObjectWritable<>(null);
    return emptyWritable;
}
/**
 * Returns the visible memory keys. During execute() this is the broadcast
 * snapshot's key set; otherwise it is the set of keys whose accumulated
 * value is non-empty, wrapped unmodifiable.
 */
@Override
public Set<String> keys() {
    if (this.inExecute)
        return this.broadcast.getValue().keySet();
    final Set<String> liveKeys = new HashSet<>();
    this.sparkMemory.forEach((key, accumulator) -> {
        if (!accumulator.value().isEmpty())
            liveKeys.add(key);
    });
    return Collections.unmodifiableSet(liveKeys);
}
// Java deserialization hook: delegate to the Writable readFields() method so
// Java serialization and Hadoop Writable deserialization share one code path.
private void readObject(final ObjectInputStream inputStream) throws IOException, ClassNotFoundException { this.readFields(inputStream); }
/**
 * Ordering for wrapped values. An empty writable sorts equal to another empty
 * writable and before any non-empty one. When the wrapped value is
 * {@link Comparable} and the other side is non-empty, natural ordering is
 * used; otherwise equality yields 0 and anything else -1.
 * NOTE(review): the equals-based fallback is not antisymmetric — presumably
 * acceptable for how this comparator is used; confirm against callers.
 */
@Override
public int compareTo(final ObjectWritable objectWritable) {
    if (null == this.t)
        return objectWritable.isEmpty() ? 0 : -1;
    if (this.t instanceof Comparable && !objectWritable.isEmpty())
        return ((Comparable) this.t).compareTo(objectWritable.get());
    return this.t.equals(objectWritable.get()) ? 0 : -1;
}
/**
 * Kryo deserialization: reads the wrapped value back and re-wraps it in an
 * {@code ObjectWritable}. The {@code clazz} parameter is unused; the concrete
 * class is recovered from the stream itself.
 */
@Override
public <I extends InputShim> ObjectWritable<T> read(final KryoShim<I, ?> kryo, final I input, final Class<ObjectWritable<T>> clazz) {
    // Replace the raw ObjectWritable construction with a typed one; the cast
    // is unchecked but scoped, instead of an untyped raw instantiation.
    return new ObjectWritable<>((T) kryo.readClassAndObject(input));
}
}
/**
 * Kryo serialization: writes only the wrapped value; the {@code ObjectWritable}
 * shell is reconstructed on read.
 */
@Override
public <O extends OutputShim> void write(final KryoShim<?, O> kryo, final O output, final ObjectWritable<T> starGraph) {
    final T wrappedValue = starGraph.get();
    kryo.writeClassAndObject(output, wrappedValue);
}
protected void broadcastMemory(final JavaSparkContext sparkContext) { this.broadcast.destroy(true); // do we need to block? final Map<String, Object> toBroadcast = new HashMap<>(); this.sparkMemory.forEach((key, object) -> { if (!object.value().isEmpty() && this.memoryComputeKeys.get(key).isBroadcast()) toBroadcast.put(key, object.value()); }); this.broadcast = sparkContext.broadcast(toBroadcast); }
@Override public int compare(byte[] bytes, int i, int i1, byte[] bytes1, int i2, int i3) { try { this.objectWritable1.readFields(new DataInputStream(new ByteArrayInputStream(bytes, i, i1))); this.objectWritable2.readFields(new DataInputStream(new ByteArrayInputStream(bytes1, i2, i3))); //System.out.println(objectWritable1 + "<=>" + objectWritable2 + ":::" + this.comparator.compare(objectWritable1.get(), objectWritable2.get())); return this.comparator.compare(this.objectWritable1.get(), this.objectWritable2.get()); } catch (final Exception e) { LOGGER.error(e.getMessage()); throw new IllegalStateException(e.getMessage()); } }
// Java deserialization hook: delegate to the Writable readFields() method so
// Java serialization and Hadoop Writable deserialization share one code path.
private void readObject(final ObjectInputStream inputStream) throws IOException, ClassNotFoundException { this.readFields(inputStream); }
/**
 * Looks up a memory value by key.
 *
 * @throws IllegalArgumentException if the key was never declared, is not
 *         broadcast-visible while inside execute(), or currently holds no value
 */
@Override
public <R> R get(final String key) throws IllegalArgumentException {
    // Undeclared keys never exist.
    if (!this.memoryComputeKeys.containsKey(key))
        throw Memory.Exceptions.memoryDoesNotExist(key);
    // Inside execute(), only broadcast keys are readable.
    if (this.inExecute && !this.memoryComputeKeys.get(key).isBroadcast())
        throw Memory.Exceptions.memoryDoesNotExist(key);
    final Object raw = this.inExecute
            ? this.broadcast.value().get(key)
            : this.sparkMemory.get(key).value();
    final ObjectWritable<R> writable = (ObjectWritable<R>) raw;
    if (null == writable || writable.isEmpty())
        throw Memory.Exceptions.memoryDoesNotExist(key);
    return writable.get();
}
/**
 * Merges two partial memory values. Empty operands act as the identity;
 * two non-empty operands are reduced via the key's reducer.
 */
@Override
public ObjectWritable<A> addAccumulator(final ObjectWritable<A> a, final ObjectWritable<A> b) {
    if (a.isEmpty())
        return b;
    if (b.isEmpty())
        return a;
    final A merged = this.memoryComputeKey.getReducer().apply(a.get(), b.get());
    return new ObjectWritable<>(merged);
}
@Override public <K, V> Iterator<KeyValue<K, V>> writeMemoryRDD(final Configuration configuration, final String memoryKey, JavaPairRDD<K, V> memoryRDD) { final org.apache.hadoop.conf.Configuration hadoopConfiguration = ConfUtil.makeHadoopConfiguration(configuration); final String outputLocation = hadoopConfiguration.get(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION); if (null != outputLocation) { // map back to a Hadoop stream for output memoryRDD.mapToPair(keyValue -> new Tuple2<>(new ObjectWritable<>(keyValue._1()), new ObjectWritable<>(keyValue._2()))) .saveAsNewAPIHadoopFile(Constants.getMemoryLocation(outputLocation, memoryKey), ObjectWritable.class, ObjectWritable.class, SequenceFileOutputFormat.class, hadoopConfiguration); try { return (Iterator) new ObjectWritableIterator(hadoopConfiguration, new Path(Constants.getMemoryLocation(outputLocation, memoryKey))); } catch (final IOException e) { throw new IllegalStateException(e.getMessage(), e); } } return Collections.emptyIterator(); } }