/**
 * Reads the serialized query from the given input using the available
 * Hadoop serializations.
 *
 * @param in data input to read the serialized query from.
 * @throws IOException if deserialization fails or the query class cannot be found.
 */
@Override
public void readFields(DataInput in) throws IOException {
  try {
    this.query = IOUtils.deserialize(conf, in, null);
  } catch (ClassNotFoundException cnfe) {
    // Surface the missing-class condition through the Writable contract.
    throw new IOException(cnfe);
  }
}
/**
 * Reads the serialized query from the given input using the available
 * Hadoop serializations.
 *
 * @param in data input to read the serialized query from.
 * @throws IOException if deserialization fails or the query class cannot be found.
 */
@Override
public void readFields(DataInput in) throws IOException {
  try {
    this.query = IOUtils.deserialize(conf, in, null);
  } catch (ClassNotFoundException cnfe) {
    // Surface the missing-class condition through the Writable contract.
    throw new IOException(cnfe);
  }
}
/**
 * Deserializes an object of the named class from the given data input
 * using the available Hadoop serializations.
 *
 * @param conf Hadoop conf.
 * @param in data input stream where serialized content is read.
 * @param <T> object class type.
 * @param obj data object to reuse, may be null.
 * @param objClass fully qualified class name of the object.
 * @return deserialized object.
 * @throws IOException occurred while deserializing the byte content.
 * @throws ClassNotFoundException class definition cannot be found for given class name.
 */
@SuppressWarnings("unchecked")
public static <T> T deserialize(Configuration conf, DataInput in, T obj,
    String objClass) throws IOException, ClassNotFoundException {
  // Resolve the class first, then delegate to the class-typed overload.
  Class<T> objectClass = (Class<T>) ClassLoadingUtils.loadClass(objClass);
  return deserialize(conf, in, obj, objectClass);
}
/**
 * Deserializes an object of the named class from the given data input
 * using the available Hadoop serializations.
 *
 * @param conf Hadoop conf.
 * @param in data input stream where serialized content is read.
 * @param <T> object class type.
 * @param obj data object to reuse, may be null.
 * @param objClass fully qualified class name of the object.
 * @return deserialized object.
 * @throws IOException occurred while deserializing the byte content.
 * @throws ClassNotFoundException class definition cannot be found for given class name.
 */
@SuppressWarnings("unchecked")
public static <T> T deserialize(Configuration conf, DataInput in, T obj,
    String objClass) throws IOException, ClassNotFoundException {
  // Resolve the class first, then delegate to the class-typed overload.
  Class<T> objectClass = (Class<T>) ClassLoadingUtils.loadClass(objClass);
  return deserialize(conf, in, obj, objectClass);
}
/**
 * Deserializes an object from the given data input using the available
 * Hadoop serializations. The target class name is read from the stream
 * itself before the object payload.
 *
 * @param conf Hadoop conf.
 * @param in data input stream where serialized content is read.
 * @param <T> object class type.
 * @param obj data object to reuse, may be null.
 * @throws IOException occurred while deserializing the byte content.
 * @throws ClassNotFoundException class definition cannot be found for given class name.
 * @return deserialized object.
 */
@SuppressWarnings("unchecked")
public static <T> T deserialize(Configuration conf, DataInput in, T obj)
    throws IOException, ClassNotFoundException {
  // The class name is serialized ahead of the payload; read it first.
  String className = Text.readString(in);
  Class<T> objectClass = (Class<T>) ClassLoadingUtils.loadClass(className);
  return deserialize(conf, in, obj, objectClass);
}
/**
 * Deserializes an object from the given data input using the available
 * Hadoop serializations. The target class name is read from the stream
 * itself before the object payload.
 *
 * @param conf Hadoop conf.
 * @param in data input stream where serialized content is read.
 * @param <T> object class type.
 * @param obj data object to reuse, may be null.
 * @throws IOException occurred while deserializing the byte content.
 * @throws ClassNotFoundException class definition cannot be found for given class name.
 * @return deserialized object.
 */
@SuppressWarnings("unchecked")
public static <T> T deserialize(Configuration conf, DataInput in, T obj)
    throws IOException, ClassNotFoundException {
  // The class name is serialized ahead of the payload; read it first.
  String className = Text.readString(in);
  Class<T> objectClass = (Class<T>) ClassLoadingUtils.loadClass(className);
  return deserialize(conf, in, obj, objectClass);
}
/**
 * Deserializes an object from the given byte array using the available
 * Hadoop serializations.
 *
 * @param conf Hadoop conf.
 * @param in serialized byte array of object.
 * @param <T> object class type.
 * @param obj data object to reuse, may be null.
 * @throws IOException occurred while deserializing the byte content.
 * @throws ClassNotFoundException class definition cannot be found for given class name.
 * @return deserialized object.
 */
public static <T> T deserialize(Configuration conf, byte[] in, T obj)
    throws IOException, ClassNotFoundException {
  // Wrap the byte array in a DataInput view and delegate to the stream overload.
  try (DataInputBuffer inputBuffer = new DataInputBuffer()) {
    inputBuffer.reset(in, in.length);
    return deserialize(conf, inputBuffer, obj);
  }
}
/**
 * Deserializes an object from the given byte array using the available
 * Hadoop serializations.
 *
 * @param conf Hadoop conf.
 * @param in serialized byte array of object.
 * @param <T> object class type.
 * @param obj data object to reuse, may be null.
 * @throws IOException occurred while deserializing the byte content.
 * @throws ClassNotFoundException class definition cannot be found for given class name.
 * @return deserialized object.
 */
public static <T> T deserialize(Configuration conf, byte[] in, T obj)
    throws IOException, ClassNotFoundException {
  // Wrap the byte array in a DataInput view and delegate to the stream overload.
  try (DataInputBuffer inputBuffer = new DataInputBuffer()) {
    inputBuffer.reset(in, in.length);
    return deserialize(conf, inputBuffer, obj);
  }
}
/**
 * Reads the serialized state from the given input: the parent state,
 * then the base query, then the location strings — in exactly the order
 * they were written.
 *
 * @param in data input to read from.
 * @throws IOException if deserialization fails or the query class cannot be found.
 */
@Override
public void readFields(DataInput in) throws IOException {
  super.readFields(in);
  try {
    baseQuery = IOUtils.deserialize(getConf(), in, null);
  } catch (ClassNotFoundException ex) {
    // Missing query class is reported through the Writable contract.
    throw new IOException(ex);
  }
  locations = IOUtils.readStringArray(in);
  // Override the data store with the base query's data store.
  // NOTE(review): the original comment suggested super.readFields might be
  // skipped so that a temporary this.dataStore is never created, yet the
  // code above does call it — confirm the intended behavior.
  this.dataStore = (DataStoreBase<K, T>) baseQuery.getDataStore();
}
@SuppressWarnings("unchecked") public static <T> T fromBytes( byte[] val, Schema schema , SpecificDatumReader<T> datumReader, T object) throws IOException { Type type = schema.getType(); switch (type) { case ENUM: String symbol = schema.getEnumSymbols().get(val[0]); return (T)Enum.valueOf(ReflectData.get().getClass(schema), symbol); case STRING: return (T)new Utf8(toString(val)); case BYTES: return (T)ByteBuffer.wrap(val); case INT: return (T)Integer.valueOf(bytesToVint(val)); case LONG: return (T)Long.valueOf(bytesToVlong(val)); case FLOAT: return (T)Float.valueOf(toFloat(val)); case DOUBLE: return (T)Double.valueOf(toDouble(val)); case BOOLEAN: return (T)Boolean.valueOf(val[0] != 0); case RECORD: //fall case MAP: case ARRAY: return (T)IOUtils.deserialize(val, (SpecificDatumReader<SpecificRecord>)datumReader, (SpecificRecord)object); default: throw new RuntimeException("Unknown type: "+type); } }
/**
 * Reads the serialized state from the given input: the parent state,
 * then the base query, then the location strings — in exactly the order
 * they were written.
 *
 * @param in data input to read from.
 * @throws IOException if deserialization fails or the query class cannot be found.
 */
@Override
public void readFields(DataInput in) throws IOException {
  super.readFields(in);
  try {
    baseQuery = IOUtils.deserialize(getConf(), in, null);
  } catch (ClassNotFoundException ex) {
    // Missing query class is reported through the Writable contract.
    throw new IOException(ex);
  }
  locations = IOUtils.readStringArray(in);
  // Override the data store with the base query's data store.
  // NOTE(review): the original comment suggested super.readFields might be
  // skipped so that a temporary this.dataStore is never created, yet the
  // code above does call it — confirm the intended behavior.
  this.dataStore = (DataStoreBase<K, T>) baseQuery.getDataStore();
}
@SuppressWarnings("unchecked") public static <T> T fromBytes( byte[] val, Schema schema , SpecificDatumReader<T> datumReader, T object) throws IOException { Type type = schema.getType(); switch (type) { case ENUM: String symbol = schema.getEnumSymbols().get(val[0]); return (T)Enum.valueOf(ReflectData.get().getClass(schema), symbol); case STRING: return (T)new Utf8(toString(val)); case BYTES: return (T)ByteBuffer.wrap(val); case INT: return (T)Integer.valueOf(bytesToVint(val)); case LONG: return (T)Long.valueOf(bytesToVlong(val)); case FLOAT: return (T)Float.valueOf(toFloat(val)); case DOUBLE: return (T)Double.valueOf(toDouble(val)); case BOOLEAN: return (T)Boolean.valueOf(val[0] != 0); case RECORD: //fall case MAP: case ARRAY: return (T)IOUtils.deserialize(val, (SpecificDatumReader<SpecificRecord>)datumReader, (SpecificRecord)object); default: throw new RuntimeException("Unknown type: "+type); } }
/**
 * Utility method for deep cloning a given AVRO persistent bean instance by
 * serializing it to bytes and deserializing into a fresh instance.
 *
 * @param persistent source persistent bean instance.
 * @param <T> persistent bean type.
 * @return cloned persistent bean.
 * @throws RuntimeException if AVRO (de)serialization fails; the underlying
 *         IOException is preserved as the cause.
 */
public static <T extends PersistentBase> T deepClonePersistent(T persistent) {
  final SpecificDatumWriter<PersistentBase> writer =
      new SpecificDatumWriter<>(persistent.getSchema());
  final byte[] byteData;
  try {
    byteData = IOUtils.serialize(writer, persistent);
  } catch (IOException e) {
    // FIX: chain the cause — the original threw without it, losing the stack trace.
    throw new RuntimeException(
        "Unable to serialize avro object to byte buffer - "
            + "please report this issue to the Gora bugtracker "
            + "or your administrator.", e);
  }
  @SuppressWarnings("unchecked")
  final SpecificDatumReader<T> reader =
      new SpecificDatumReader<>((Class<T>) persistent.getClass());
  try {
    return IOUtils.deserialize(byteData, reader, null);
  } catch (IOException e) {
    // FIX: chain the cause — the original threw without it, losing the stack trace.
    throw new RuntimeException(
        "Unable to deserialize avro object from byte buffer - "
            + "please report this issue to the Gora bugtracker "
            + "or your administrator.", e);
  }
}
/**
 * Utility method for deep cloning a given AVRO persistent bean instance by
 * serializing it to bytes and deserializing into a fresh instance.
 *
 * @param persistent source persistent bean instance.
 * @param <T> persistent bean type.
 * @return cloned persistent bean.
 * @throws RuntimeException if AVRO (de)serialization fails; the underlying
 *         IOException is preserved as the cause.
 */
public static <T extends PersistentBase> T deepClonePersistent(T persistent) {
  final SpecificDatumWriter<PersistentBase> writer =
      new SpecificDatumWriter<>(persistent.getSchema());
  final byte[] byteData;
  try {
    byteData = IOUtils.serialize(writer, persistent);
  } catch (IOException e) {
    // FIX: chain the cause — the original threw without it, losing the stack trace.
    throw new RuntimeException(
        "Unable to serialize avro object to byte buffer - "
            + "please report this issue to the Gora bugtracker "
            + "or your administrator.", e);
  }
  @SuppressWarnings("unchecked")
  final SpecificDatumReader<T> reader =
      new SpecificDatumReader<>((Class<T>) persistent.getClass());
  try {
    return IOUtils.deserialize(byteData, reader, null);
  } catch (IOException e) {
    // FIX: chain the cause — the original threw without it, losing the stack trace.
    throw new RuntimeException(
        "Unable to deserialize avro object from byte buffer - "
            + "please report this issue to the Gora bugtracker "
            + "or your administrator.", e);
  }
}
@SuppressWarnings("rawtypes") SpecificDatumReader reader = getDatumReader(fieldSchema); fieldValue = IOUtils.deserialize((byte[]) solrValue, reader, persistent.get(field.pos())); break; @SuppressWarnings("rawtypes") SpecificDatumReader unionReader = getDatumReader(fieldSchema); fieldValue = IOUtils.deserialize((byte[]) solrValue, unionReader, persistent.get(field.pos())); break;
case RECORD: @SuppressWarnings("rawtypes") SpecificDatumReader reader = getDatumReader(fieldSchema); fieldValue = IOUtils.deserialize((byte[]) igniteValue, reader, persistent.get(field.pos())); break; } else { reader = getDatumReader(fieldSchema); fieldValue = IOUtils.deserialize((byte[]) igniteValue, reader, persistent.get(field.pos()));
fields = IOUtils.readStringArray(in); if(!nullFields[2]) startKey = IOUtils.deserialize(getConf(), in, null, dataStore.getKeyClass()); if(!nullFields[3]) endKey = IOUtils.deserialize(getConf(), in, null, dataStore.getKeyClass()); if(!nullFields[4]) { String filterClass = Text.readString(in);
fields = IOUtils.readStringArray(in); if(!nullFields[2]) startKey = IOUtils.deserialize(getConf(), in, null, dataStore.getKeyClass()); if(!nullFields[3]) endKey = IOUtils.deserialize(getConf(), in, null, dataStore.getKeyClass()); if(!nullFields[4]) { String filterClass = Text.readString(in);
T after = IOUtils.deserialize(conf, dis, null, (Class<T>)before.getClass()); if (before instanceof BoolArrayWrapper) { if (after instanceof BoolArrayWrapper) {