/** * Checks to see if the given field is a schema-less Map that has values. * @return true if Map has no schema but has values (mixed schema map). false if not a Map or if Map is just empty. */ private boolean isPopulatedMixedValueMap(ResourceFieldSchema schema, int field, Tuple object) { if (schema.getType() != DataType.MAP) { // Can't be a mixed value map if it's not a map at all. return false; } try { Object fieldValue = object.get(field); Map<?, ?> map = (Map<?, ?>) fieldValue; return schema.getSchema() == null && !(map == null || map.isEmpty()); } catch (ExecException e) { throw new EsHadoopIllegalStateException(e); } }
if (fieldName.equals(field.getName())) { foundField = true; byte type = field.getType(); try { Object object = pt.getTuple().get(i);
private Result write(Object object, ResourceFieldSchema field, Generator generator) { byte type = (field != null ? field.getType() : DataType.findType(object));
/** * Checks to see if the given field is a schema-less Map that has values. * @return true if Map has no schema but has values (mixed schema map). false if not a Map or if Map is just empty. */ private boolean isPopulatedMixedValueMap(ResourceFieldSchema schema, int field, Tuple object) { if (schema.getType() != DataType.MAP) { // Can't be a mixed value map if it's not a map at all. return false; } try { Object fieldValue = object.get(field); Map<?, ?> map = (Map<?, ?>) fieldValue; return schema.getSchema() == null && !(map == null || map.isEmpty()); } catch (ExecException e) { throw new EsHadoopIllegalStateException(e); } }
/**
 * Reads a complex-typed value (bag, tuple, or map) from the stream, dispatching
 * on the declared schema type.
 *
 * @param complexFieldSchema schema whose type selects the parse routine
 * @return the parsed bag, tuple, or map
 * @throws IOException if the schema type is not a recognized complex type, or
 *                     if the underlying consumer fails
 */
private Object consumeComplexType(PushbackInputStream in, ResourceFieldSchema complexFieldSchema) throws IOException {
    final byte type = complexFieldSchema.getType();
    if (type == DataType.BAG) {
        return consumeBag(in, complexFieldSchema);
    }
    if (type == DataType.TUPLE) {
        return consumeTuple(in, complexFieldSchema);
    }
    if (type == DataType.MAP) {
        return consumeMap(in, complexFieldSchema);
    }
    throw new IOException("Unknown complex data type");
}
/**
 * Returns true if the given field schema represents a complex Pig type.
 *
 * NOTE(review): relies on the numeric convention that complex DataType codes
 * (bag, tuple, map) are all >= 100 — confirm against the DataType constants
 * in the Pig version in use before changing this threshold.
 *
 * @param fieldSchema the field schema to inspect; may be null
 * @return true for a non-null schema whose type code is in the complex range
 */
static boolean isComplexType(ResourceFieldSchema fieldSchema) {
    return (fieldSchema != null && fieldSchema.getType() >= 100);
}
}
/**
 * Resolves the Pig type byte for a value: uses the declared schema type when
 * one is available, otherwise discovers the type from the runtime object.
 *
 * @param o           the value whose type is needed
 * @param fieldSchema the declared schema, or null when none exists
 * @return the Pig {@code DataType} byte code for the value
 */
protected byte schemaToType(Object o, ResourceFieldSchema fieldSchema) {
    if (fieldSchema == null) {
        return DataType.findType(o);
    }
    return fieldSchema.getType();
}
/**
 * Validates that the store schema is a Pig bytearray field.
 *
 * @param schema the field schema to validate; must not be null
 * @throws IOException if the schema type is anything other than BYTEARRAY
 */
@Override
public void checkStoreSchema(ResourceFieldSchema schema) throws IOException {
    // Message added for consistency with the other checkStoreSchema implementations.
    Preconditions.checkNotNull(schema, "Schema is null");
    // Braced per house style; original used an un-braced single-line if.
    if (schema.getType() != DataType.BYTEARRAY) {
        throw new IOException("Expected Pig type '" + DataType.findTypeName(DataType.BYTEARRAY)
                + "' but found '" + DataType.findTypeName(schema.getType()) + "'");
    }
}
/**
 * Validates that the store schema is a tuple whose child schema matches the
 * expected schema configured for this storer.
 *
 * @param schema the incoming store schema
 * @throws IOException declared by the interface; validation failures surface
 *                     as unchecked precondition exceptions
 */
@Override
public void checkStoreSchema(ResourceFieldSchema schema) throws IOException {
    Preconditions.checkNotNull(schema, "Schema is null");
    byte schemaType = schema.getType();
    Preconditions.checkArgument(DataType.TUPLE == schemaType,
            "Expected schema type '%s' but found type '%s'",
            DataType.findTypeName(DataType.TUPLE), DataType.findTypeName(schemaType));
    ResourceSchema childSchema = schema.getSchema();
    Preconditions.checkNotNull(childSchema, "Child schema is null");
    // Compare against the configured schema, ignoring field aliases where allowed.
    Schema actualSchema = Schema.getPigSchema(childSchema);
    boolean matches = Schema.equals(expectedSchema, actualSchema, false, true);
    Preconditions.checkArgument(matches,
            "Expected store schema '%s' but found schema '%s'", expectedSchema, actualSchema);
}
/**
 * Resolves the Pig type byte for the i-th field: uses the declared schema at
 * that index when field schemas exist, otherwise derives the type from the
 * runtime object.
 *
 * @param o            the field value
 * @param i            the field's positional index
 * @param fieldSchemas the declared field schemas, or null when unavailable
 * @return the Pig {@code DataType} byte code for the field
 */
protected byte schemaToType(Object o, int i, ResourceFieldSchema[] fieldSchemas) {
    if (fieldSchemas == null) {
        return DataType.findType(o);
    }
    return fieldSchemas[i].getType();
}
/**
 * Validates that the store schema is one of the supported Pig scalar types
 * (chararray, integer, long, float, double).
 *
 * @param schema the field schema to validate
 * @throws IOException if the schema's type is not supported
 */
@Override
public void checkStoreSchema(ResourceFieldSchema schema) throws IOException {
    byte type = schema.getType();
    boolean supported = type == DataType.CHARARRAY
            || type == DataType.INTEGER
            || type == DataType.LONG
            || type == DataType.FLOAT
            || type == DataType.DOUBLE;
    if (!supported) {
        throw new IOException("Pig type '" + DataType.findTypeName(type) + "' unsupported");
    }
}
/**
 * Converts a raw byte array into a Pig value according to the field schema:
 * complex types (bag/tuple/map) are parsed via the streaming consumers, while
 * scalar types go through the simple-type parser.
 *
 * @param b  the serialized field bytes
 * @param fs the schema describing the field
 * @return the deserialized Pig value
 * @throws IOException if parsing fails
 */
private Object bytesToObject(byte[] b, ResourceFieldSchema fs) throws IOException {
    if (!DataType.isComplex(fs.getType())) {
        return parseSimpleType(b, fs);
    }
    // Complex types are consumed from a pushback stream so the parser can
    // peek at delimiters.
    PushbackInputStream in = new PushbackInputStream(new ByteArrayInputStream(b));
    return consumeComplexType(in, fs);
}
/**
 * Ensures the store schema uses one of the supported Pig scalar types.
 *
 * @param schema the field schema to check
 * @throws IOException for any type other than chararray, integer, long,
 *                     float, or double
 */
@Override
public void checkStoreSchema(ResourceFieldSchema schema) throws IOException {
    switch (schema.getType()) {
        case DataType.CHARARRAY:
        case DataType.INTEGER:
        case DataType.LONG:
        case DataType.FLOAT:
        case DataType.DOUBLE:
            // Supported scalar type: nothing to do.
            return;
        default:
            throw new IOException("Pig type '" + DataType.findTypeName(schema.getType()) + "' unsupported");
    }
}
/**
 * Checks that the incoming store schema is a tuple and that its child schema
 * equals the schema this storer expects.
 *
 * @param schema the store schema provided by Pig
 * @throws IOException declared by the interface; failures are reported via
 *                     precondition exceptions
 */
@Override
public void checkStoreSchema(ResourceFieldSchema schema) throws IOException {
    Preconditions.checkNotNull(schema, "Schema is null");
    // The top-level schema must be a tuple wrapping the record fields.
    Preconditions.checkArgument(DataType.TUPLE == schema.getType(),
            "Expected schema type '%s' but found type '%s'",
            DataType.findTypeName(DataType.TUPLE),
            DataType.findTypeName(schema.getType()));
    ResourceSchema childSchema = schema.getSchema();
    Preconditions.checkNotNull(childSchema, "Child schema is null");
    Schema actualSchema = Schema.getPigSchema(childSchema);
    Preconditions.checkArgument(
            Schema.equals(expectedSchema, actualSchema, false, true),
            "Expected store schema '%s' but found schema '%s'",
            expectedSchema, actualSchema);
}
/**
 * Normalizes a schema in place by replacing any NULL-typed fields with
 * BYTEARRAY, Pig's catch-all type.
 *
 * @param s the schema to fix; mutated in place
 * @return the same schema instance, with NULL field types rewritten
 */
public ResourceSchema fixSchema(ResourceSchema s) {
    // Fixed misspelled local "filed" -> "field" and braced the if per house style.
    for (ResourceFieldSchema field : s.getFields()) {
        if (field.getType() == DataType.NULL) {
            field.setType(DataType.BYTEARRAY);
        }
    }
    return s;
}
switch (fieldSchema.getType()) { case DataType.BYTEARRAY: byte[] bytes = PDataType.fromTypeId(PBinary.INSTANCE.getSqlType()).toBytes(object);
/**
 * Writes one Pig tuple to Phoenix: each field is cast from its Pig type to the
 * corresponding Phoenix column type and appended to a record, which is then
 * handed to the underlying record writer.
 *
 * @param t the tuple to store
 * @throws IOException on SQL failures or if the write is interrupted
 */
@Override
public void putNext(Tuple t) throws IOException {
    ResourceFieldSchema[] fieldSchemas = (schema == null) ? null : schema.getFields();
    PhoenixRecordWritable record = new PhoenixRecordWritable(this.columnInfo);
    try {
        final int fieldCount = t.size();
        for (int idx = 0; idx < fieldCount; idx++) {
            Object value = t.get(idx);
            // Nulls pass straight through without type conversion.
            if (value == null) {
                record.add(null);
                continue;
            }
            ColumnInfo cinfo = this.columnInfo.get(idx);
            // Prefer the declared schema type; fall back to runtime discovery.
            byte pigType = (fieldSchemas != null) ? fieldSchemas[idx].getType() : DataType.findType(value);
            PDataType pDataType = PDataType.fromTypeId(cinfo.getSqlType());
            record.add(TypeUtil.castPigTypeToPhoenix(value, pigType, pDataType));
        }
        this.writer.write(null, record);
    } catch (InterruptedException e) {
        // Restore the interrupt flag before surfacing the failure.
        Thread.currentThread().interrupt();
        throw new RuntimeException(e);
    } catch (SQLException e) {
        LOG.error("Error on tuple {} .",t);
        throw new IOException(e);
    }
}
/**
 * Binds this record's values to the prepared upsert statement and executes it.
 * Each value is converted to the column's SQL type; nulls are bound via
 * {@code setNull}.
 *
 * @param statement          the prepared upsert statement to populate
 * @param columnMetadataList column metadata, parallel to this record's values
 * @throws SQLException if binding or execution fails
 */
public void write(PreparedStatement statement, List<ColumnInfo> columnMetadataList) throws SQLException {
    final int columnCount = columnMetadataList.size();
    for (int col = 0; col < columnCount; col++) {
        Object raw = values.get(col);
        int sqlType = columnMetadataList.get(col).getSqlType();
        // Use the declared Pig type when field schemas exist, else infer it.
        byte pigType = (fieldSchemas != null) ? fieldSchemas[col].getType() : DataType.findType(raw);
        Object upsertValue = convertTypeSpecificValue(raw, pigType, sqlType);
        // JDBC parameters are 1-based.
        if (upsertValue == null) {
            statement.setNull(col + 1, sqlType);
        } else {
            statement.setObject(col + 1, upsertValue, sqlType);
        }
    }
    statement.execute();
}
/**
 * Returns true if the given field schema denotes a complex Pig type.
 *
 * NOTE(review): assumes Pig assigns complex-type codes (bag, tuple, map)
 * values >= 100 — verify against the DataType constants of the Pig version
 * on the classpath.
 *
 * @param fieldSchema the field schema to test; null yields false
 * @return true when non-null and the type code is in the complex range
 */
static boolean isComplexType(ResourceFieldSchema fieldSchema) {
    return (fieldSchema != null && fieldSchema.getType() >= 100);
}
}