/**
 * Creates a fresh, zero-initialized {@link FloatValue} for the runtime to reuse.
 *
 * @return a new mutable FloatValue instance
 */
@Override
public FloatValue createValue() {
	return new FloatValue();
}
/**
 * Deserializes a FloatValue by reading its binary payload into the given
 * reusable instance, avoiding a new allocation per record.
 *
 * @param reuse  the mutable instance to populate
 * @param source the view to read the serialized bytes from
 * @return the populated {@code reuse} instance
 * @throws IOException if reading from {@code source} fails
 */
@Override
public FloatValue deserialize(FloatValue reuse, DataInputView source) throws IOException {
	// FloatValue knows its own wire format; delegate directly.
	reuse.read(source);
	return reuse;
}
break; case FloatValue: upload.setFloat(index, ((FloatValue) value).getValue()); break; case IntValue:
/**
 * Parses a delimited float field from {@code bytes}.
 *
 * <p>Scans forward from {@code startPos} until the delimiter byte or {@code limit}
 * is reached, decodes that span as text, and stores the parsed float into
 * {@code reusable}.
 *
 * <p>Fix 1: the text is decoded with an explicit ASCII charset instead of the
 * platform-default charset (digits, sign, dot and exponent are all ASCII).
 * Fix 2: {@link Float#parseFloat(String)} silently trims surrounding whitespace,
 * which would accept malformed fields such as {@code " 1.0"}; empty fields and
 * fields with leading/trailing whitespace are now rejected explicitly.
 *
 * @param bytes    the raw record bytes
 * @param startPos first byte of the field
 * @param limit    exclusive end of the searchable region
 * @param delim    the field delimiter character (compared as a single byte)
 * @param reusable mutable target for the parsed value
 * @return the position after the consumed field and delimiter, {@code limit} if
 *         the field ran to the end, or {@code -1} on a parse error (error state
 *         is set to {@code NUMERIC_VALUE_FORMAT_ERROR})
 */
@Override
public int parseField(byte[] bytes, int startPos, int limit, char delim, FloatValue reusable) {
	final byte delByte = (byte) delim;
	int i = startPos;
	while (i < limit && bytes[i] != delByte) {
		i++;
	}

	String str = new String(bytes, startPos, i - startPos, java.nio.charset.StandardCharsets.US_ASCII);

	// Reject empty fields and fields padded with whitespace; Float.parseFloat
	// would otherwise trim and accept the latter.
	if (str.isEmpty()
			|| Character.isWhitespace(str.charAt(0))
			|| Character.isWhitespace(str.charAt(str.length() - 1))) {
		setErrorState(ParseErrorState.NUMERIC_VALUE_FORMAT_ERROR);
		return -1;
	}

	try {
		float value = Float.parseFloat(str);
		reusable.setValue(value);
		this.result = reusable;
		return (i == limit) ? limit : i + 1;
	} catch (NumberFormatException e) {
		setErrorState(ParseErrorState.NUMERIC_VALUE_FORMAT_ERROR);
		return -1;
	}
}
/**
 * Serializes the given FloatValue by delegating to its own write method.
 *
 * @param record the value to serialize
 * @param target the view receiving the serialized bytes
 * @throws IOException if writing to {@code target} fails
 */
@Override
public void serialize(FloatValue record, DataOutputView target) throws IOException {
	record.write(target);
}
/**
 * Converts the field at position {@code pos} of a Stratosphere {@link Record}
 * into a freshly allocated instance of the requested Hadoop writable type.
 *
 * <p>Fix: the reverse (Hadoop-to-Stratosphere) converter maps
 * {@code NullWritable} to {@code NullValue}, but this direction previously
 * threw for {@code NullWritable}; the symmetric case is now supported.
 *
 * @param stratosphereType the source record
 * @param pos              the field index to read
 * @param hadoopType       the Hadoop writable class to produce
 * @return the converted value, typed as {@code T}
 * @throws RuntimeException if {@code hadoopType} is not a supported writable
 */
@SuppressWarnings("unchecked")
private <T> T convert(Record stratosphereType, int pos, Class<T> hadoopType) {
	if (hadoopType == LongWritable.class) {
		return (T) new LongWritable((stratosphereType.getField(pos, LongValue.class)).getValue());
	}
	if (hadoopType == org.apache.hadoop.io.Text.class) {
		return (T) new Text((stratosphereType.getField(pos, StringValue.class)).getValue());
	}
	if (hadoopType == org.apache.hadoop.io.IntWritable.class) {
		return (T) new IntWritable((stratosphereType.getField(pos, IntValue.class)).getValue());
	}
	if (hadoopType == org.apache.hadoop.io.FloatWritable.class) {
		return (T) new FloatWritable((stratosphereType.getField(pos, FloatValue.class)).getValue());
	}
	if (hadoopType == org.apache.hadoop.io.DoubleWritable.class) {
		return (T) new DoubleWritable((stratosphereType.getField(pos, DoubleValue.class)).getValue());
	}
	if (hadoopType == org.apache.hadoop.io.BooleanWritable.class) {
		return (T) new BooleanWritable((stratosphereType.getField(pos, BooleanValue.class)).getValue());
	}
	if (hadoopType == org.apache.hadoop.io.ByteWritable.class) {
		return (T) new ByteWritable((stratosphereType.getField(pos, ByteValue.class)).getValue());
	}
	if (hadoopType == org.apache.hadoop.io.NullWritable.class) {
		// NullWritable is a stateless singleton; no field needs to be read.
		return (T) org.apache.hadoop.io.NullWritable.get();
	}
	throw new RuntimeException("Unable to convert Stratosphere type ("+stratosphereType.getClass().getCanonicalName()+") to Hadoop.");
}
}
/**
 * Parses a delimited float field from {@code bytes} into {@code reusable}.
 *
 * <p>The field spans from {@code startPos} up to the first occurrence of the
 * delimiter byte or {@code limit}, whichever comes first.
 *
 * <p>Fix 1: decode with an explicit ASCII charset rather than the
 * platform-default charset (float text is pure ASCII).
 * Fix 2: {@link Float#parseFloat(String)} trims surrounding whitespace, so
 * padded fields such as {@code "1.0 "} used to be accepted; empty and
 * whitespace-padded fields are now rejected as format errors.
 *
 * @param bytes    the raw record bytes
 * @param startPos first byte of the field
 * @param limit    exclusive end of the searchable region
 * @param delim    the field delimiter character (compared as a single byte)
 * @param reusable mutable target for the parsed value
 * @return the position after the field and its delimiter, {@code limit} if the
 *         field ran to the end, or {@code -1} on a parse error (error state is
 *         set to {@code NUMERIC_VALUE_FORMAT_ERROR})
 */
@Override
public int parseField(byte[] bytes, int startPos, int limit, char delim, FloatValue reusable) {
	final byte delByte = (byte) delim;
	int i = startPos;
	while (i < limit && bytes[i] != delByte) {
		i++;
	}

	String str = new String(bytes, startPos, i - startPos, java.nio.charset.StandardCharsets.US_ASCII);

	// Strict parsing: do not let Float.parseFloat's implicit trimming accept
	// whitespace-padded or empty fields.
	if (str.isEmpty()
			|| Character.isWhitespace(str.charAt(0))
			|| Character.isWhitespace(str.charAt(str.length() - 1))) {
		setErrorState(ParseErrorState.NUMERIC_VALUE_FORMAT_ERROR);
		return -1;
	}

	try {
		float value = Float.parseFloat(str);
		reusable.setValue(value);
		this.result = reusable;
		return (i == limit) ? limit : i + 1;
	} catch (NumberFormatException e) {
		setErrorState(ParseErrorState.NUMERIC_VALUE_FORMAT_ERROR);
		return -1;
	}
}
/**
 * Writes the record's binary representation to the target view.
 *
 * @param record the FloatValue to write
 * @param target the destination view
 * @throws IOException if the underlying write fails
 */
@Override
public void serialize(FloatValue record, DataOutputView target) throws IOException {
	// The value type defines its own serialization; simply delegate.
	record.write(target);
}
/**
 * Instantiates a new, zero-valued {@link FloatValue}.
 *
 * @return a fresh mutable instance
 */
@Override
public FloatValue createInstance() {
	return new FloatValue();
}
/**
 * Maps a primitive Avro value onto the matching reusable Stratosphere
 * {@link Value} holder.
 *
 * <p>The mutable holder fields ({@code sString}, {@code sInt}, …) are shared
 * and reused across calls to avoid per-record allocations; NULL maps to the
 * {@code NullValue} singleton.
 *
 * @param type       the Avro schema type of {@code avroRecord}
 * @param avroRecord the boxed primitive read from the Avro record
 * @return the populated reusable holder for {@code type}
 * @throws RuntimeException for Avro types without a mapping
 */
private final Value convertAvroPrimitiveToValue(Type type, Object avroRecord) {
	switch (type) {
		case NULL:
			return NullValue.getInstance();
		case BOOLEAN:
			sBool.setValue((Boolean) avroRecord);
			return sBool;
		case INT:
			sInt.setValue((Integer) avroRecord);
			return sInt;
		case LONG:
			sLong.setValue((Long) avroRecord);
			return sLong;
		case FLOAT:
			sFloat.setValue((Float) avroRecord);
			return sFloat;
		case DOUBLE:
			sDouble.setValue((Double) avroRecord);
			return sDouble;
		case STRING:
			sString.setValue((CharSequence) avroRecord);
			return sString;
		default:
			throw new RuntimeException( "Type " + type + " for AvroInputFormat is not implemented. Open an issue on GitHub.");
	}
}
/**
 * Reads a serialized FloatValue from the source view into the provided
 * reusable instance.
 *
 * @param reuse  instance to populate and return
 * @param source view containing the serialized bytes
 * @return {@code reuse}, now holding the deserialized value
 * @throws IOException on read failure
 */
@Override
public FloatValue deserialize(FloatValue reuse, DataInputView source) throws IOException {
	reuse.read(source);
	return reuse;
}
/**
 * Supplies a new mutable {@link FloatValue} instance.
 *
 * @return a freshly allocated value holder
 */
@Override
public FloatValue createValue() {
	return new FloatValue();
}
/**
 * Creates a default (zero) {@link FloatValue}.
 *
 * @return a new instance for the framework to fill in
 */
@Override
public FloatValue createInstance() {
	return new FloatValue();
}
/**
 * Expected {@link FloatValue} results for the parser's valid test inputs:
 * zeros, plain decimals, leading-dot notation, a negative, the float extremes,
 * both infinities, NaN, and scientific notation in several spellings.
 *
 * @return the expected values, in the same order as the valid test inputs
 */
@Override
public FloatValue[] getValidTestResults() {
	final float[] expected = {
		0f, 0.0f, 123.4f, 0.124f, .623f, 1234f, -12.34f,
		Float.MAX_VALUE, Float.MIN_VALUE,
		Float.NEGATIVE_INFINITY, Float.POSITIVE_INFINITY, Float.NaN,
		1.234E2f, 1.234e3f, 1.234E-2f
	};
	final FloatValue[] results = new FloatValue[expected.length];
	for (int i = 0; i < expected.length; i++) {
		results[i] = new FloatValue(expected[i]);
	}
	return results;
}
return new FloatValue(((java.lang.Float)java).floatValue());
return new FloatValue(((java.lang.Float)java).floatValue());
/**
 * Converts a Hadoop writable into the corresponding Stratosphere {@link Value}.
 *
 * <p>Each supported writable is unwrapped into a newly allocated value object;
 * {@code NullWritable} maps to the shared {@code NullValue} singleton.
 *
 * @param hadoopType the Hadoop writable to convert
 * @return the equivalent Stratosphere value
 * @throws RuntimeException if the writable type has no mapping
 */
protected Value convert(Object hadoopType) {
	if (hadoopType instanceof LongWritable) {
		return new LongValue(((LongWritable) hadoopType).get());
	}
	if (hadoopType instanceof Text) {
		return new StringValue(((Text) hadoopType).toString());
	}
	if (hadoopType instanceof IntWritable) {
		return new IntValue(((IntWritable) hadoopType).get());
	}
	if (hadoopType instanceof FloatWritable) {
		return new FloatValue(((FloatWritable) hadoopType).get());
	}
	if (hadoopType instanceof DoubleWritable) {
		return new DoubleValue(((DoubleWritable) hadoopType).get());
	}
	if (hadoopType instanceof BooleanWritable) {
		return new BooleanValue(((BooleanWritable) hadoopType).get());
	}
	if (hadoopType instanceof ByteWritable) {
		return new ByteValue(((ByteWritable) hadoopType).get());
	}
	if (hadoopType instanceof NullWritable) {
		return NullValue.getInstance();
	}
	throw new RuntimeException("Unable to convert Hadoop type ("+hadoopType.getClass().getCanonicalName()+") to Stratosphere.");
}
}
FloatMapValue fm = new FloatMapValue(); for (Map.Entry<CharSequence, ?> entry : avroMap.entrySet()) { fm.put(new StringValue((CharSequence) entry.getKey()), new FloatValue((Float) entry.getValue()));