final GenericData.Array<Object> convertedArray = new GenericData.Array<>(array.length, schema); for (Object element : array) { convertedArray.add(convertFlinkType(elementSchema, element));
@Test public void testGetStringArrayUtf8() throws IOException { // Expectation: Even though we read an Avro object with UTF8 underneath, the accessor converts it into a // Java String List<String> expectedQuotes = ImmutableList.of("abc", "defg"); GenericData.Array<Utf8> strings = new GenericData.Array<Utf8>(2, Schema.createArray(Schema.create(Schema.Type.STRING))); expectedQuotes.forEach(s -> strings.add(new Utf8(s))); record.put("favorite_quotes", strings); Assert.assertEquals(accessor.getGeneric("favorite_quotes"), expectedQuotes); }
public void map(LongWritable key, Text value,
    OutputCollector<AvroWrapper<Pair<Long, GenericData.Record>>, NullWritable> out,
    Reporter reporter) throws IOException {
  // Build the inner record carrying the single optional field.
  GenericData.Record innerRecord = new GenericData.Record(createInnerSchema("optional_field_1"));
  innerRecord.put("optional_field_1", 0L);

  // Wrap the inner record in a one-entry Avro array.
  GenericData.Array<GenericData.Record> records =
      new GenericData.Array<>(1, createArraySchema());
  records.add(innerRecord);

  // Attach the array to the container record and emit it keyed by the input offset.
  GenericData.Record container = new GenericData.Record(createSchema());
  container.put("Optional", records);
  out.collect(new AvroWrapper<>(new Pair<>(key.get(), container)), NullWritable.get());
}
}
Object entryAvroValue = translate(list.get(i), elementDataSchema, elementAvroSchema); _path.removeLast(); avroList.add(entryAvroValue);
final GenericData.Array<Object> convertedArray = new GenericData.Array<>(array.length, schema); for (Object element : array) { convertedArray.add(convertFlinkType(elementSchema, element));
final GenericData.Array<Object> convertedArray = new GenericData.Array<>(array.length, schema); for (Object element : array) { convertedArray.add(convertFlinkType(elementSchema, element));
adeadServerNames = new GenericData.Array<CharSequence>(deadServerNames.size(), stringArraySchema); for (ServerName deadServerName : deadServerNames) { adeadServerNames.add(new Utf8(deadServerName.toString())); aserverInfos = new GenericData.Array<AServerInfo>(hserverInfos.size(), s); for (ServerName hsi : hserverInfos) { aserverInfos.add(hsiToASI(hsi, cs.getLoad(hsi)));
/**
 * Packs a Pig DataBag into an Avro array.
 * @param db the Pig databag to pack into the Avro array
 * @param s the Avro schema for which to determine the type
 * @return the Avro array corresponding to the input bag
 * @throws IOException if a tuple cannot be packed into the schema
 */
public static GenericData.Array<Object> packIntoAvro(
    final DataBag db, final Schema s) throws IOException {
  try {
    // Plain (int) cast replaces the deprecated new Long(...).intValue() boxing dance.
    GenericData.Array<Object> array =
        new GenericData.Array<Object>((int) db.size(), s);
    for (Tuple t : db) {
      if (s.getElementType() != null
          && s.getElementType().getType() == Type.RECORD) {
        // Record elements are packed recursively.
        array.add(packIntoAvro(t, s.getElementType()));
      } else if (t.size() == 1) {
        // Scalar elements must be single-field tuples.
        array.add(t.get(0));
      } else {
        throw new IOException(
            "AvroStorageDataConversionUtilities.packIntoAvro: Can't pack "
            + t + " into schema " + s);
      }
    }
    return array;
  } catch (Exception e) {
    // Preserve the original exception as the cause.
    throw new IOException(
        "exception in AvroStorageDataConversionUtilities.packIntoAvro", e);
  }
}
static public ATableDescriptor htdToATD(HTableDescriptor table) throws IOException {
  ATableDescriptor atd = new ATableDescriptor();
  atd.name = ByteBuffer.wrap(table.getName());

  // Convert each column family descriptor. The array capacity matches the family
  // count, so zero families naturally yields an empty array (same result as the
  // old explicit empty-branch).
  Collection<HColumnDescriptor> families = table.getFamilies();
  Schema afdSchema = Schema.createArray(AFamilyDescriptor.SCHEMA$);
  GenericData.Array<AFamilyDescriptor> afamilies =
      new GenericData.Array<AFamilyDescriptor>(families.size(), afdSchema);
  for (HColumnDescriptor hcd : families) {
    afamilies.add(hcdToAFD(hcd));
  }
  atd.families = afamilies;

  // Copy the scalar table attributes straight across.
  atd.maxFileSize = table.getMaxFileSize();
  atd.memStoreFlushSize = table.getMemStoreFlushSize();
  atd.rootRegion = table.isRootRegion();
  atd.metaRegion = table.isMetaRegion();
  atd.metaTable = table.isMetaTable();
  atd.readOnly = table.isReadOnly();
  atd.deferredLogFlush = table.isDeferredLogFlush();
  return atd;
}
static public AServerLoad hslToASL(HServerLoad hsl) throws IOException {
  AServerLoad asl = new AServerLoad();

  // Scalar load metrics.
  asl.load = hsl.getLoad();
  asl.maxHeapMB = hsl.getMaxHeapMB();
  asl.memStoreSizeInMB = hsl.getMemStoreSizeInMB();
  asl.numberOfRegions = hsl.getNumberOfRegions();
  asl.numberOfRequests = hsl.getNumberOfRequests();

  // Per-region loads; a null collection maps to an empty Avro array.
  Schema regionLoadSchema = Schema.createArray(ARegionLoad.SCHEMA$);
  Collection<HServerLoad.RegionLoad> regionLoads = hsl.getRegionsLoad().values();
  GenericData.Array<ARegionLoad> aregionLoads;
  if (regionLoads == null) {
    aregionLoads = new GenericData.Array<ARegionLoad>(0, regionLoadSchema);
  } else {
    aregionLoads =
        new GenericData.Array<ARegionLoad>(regionLoads.size(), regionLoadSchema);
    for (HServerLoad.RegionLoad regionLoad : regionLoads) {
      aregionLoads.add(hrlToARL(regionLoad));
    }
  }
  asl.regionsLoad = aregionLoads;

  // Storefile statistics and heap usage.
  asl.storefileIndexSizeInMB = hsl.getStorefileIndexSizeInMB();
  asl.storefiles = hsl.getStorefiles();
  asl.storefileSizeInMB = hsl.getStorefileSizeInMB();
  asl.usedHeapMB = hsl.getUsedHeapMB();
  return asl;
}
static public AResult resultToAResult(Result result) {
  AResult aresult = new AResult();

  // A null row is represented by a single zero byte.
  byte[] row = result.getRow();
  aresult.row = ByteBuffer.wrap(row == null ? new byte[1] : row);

  // Convert each KeyValue into an AResultEntry; null/empty results map to an
  // empty Avro array.
  Schema entrySchema = Schema.createArray(AResultEntry.SCHEMA$);
  List<KeyValue> keyValues = result.list();
  GenericData.Array<AResultEntry> entries;
  if (keyValues == null || keyValues.isEmpty()) {
    entries = new GenericData.Array<AResultEntry>(0, entrySchema);
  } else {
    entries = new GenericData.Array<AResultEntry>(keyValues.size(), entrySchema);
    for (KeyValue keyValue : keyValues) {
      AResultEntry entry = new AResultEntry();
      entry.family = ByteBuffer.wrap(keyValue.getFamily());
      entry.qualifier = ByteBuffer.wrap(keyValue.getQualifier());
      entry.value = ByteBuffer.wrap(keyValue.getValue());
      entry.timestamp = keyValue.getTimestamp();
      entries.add(entry);
    }
  }
  aresult.entries = entries;
  return aresult;
}
public GenericArray<ATableDescriptor> listTables() throws AIOError {
  try {
    HTableDescriptor[] tables = admin.listTables();
    Schema atdSchema = Schema.createArray(ATableDescriptor.SCHEMA$);
    // Initialize directly instead of the redundant null-then-assign dance.
    GenericData.Array<ATableDescriptor> result =
        new GenericData.Array<ATableDescriptor>(tables.length, atdSchema);
    for (HTableDescriptor table : tables) {
      result.add(AvroUtil.htdToATD(table));
    }
    return result;
  } catch (IOException e) {
    AIOError ioe = new AIOError();
    // Guard against IOExceptions with a null message: new Utf8(null) would throw a
    // NullPointerException here and mask the real failure.
    String message = e.getMessage();
    ioe.message = new Utf8(message != null ? message : e.toString());
    throw ioe;
  }
}
static public GenericArray<AResult> resultsToAResults(Result[] results) {
  Schema schema = Schema.createArray(AResult.SCHEMA$);
  // A null or empty input yields an empty Avro array; otherwise the capacity
  // matches the input length.
  int count = (results == null) ? 0 : results.length;
  GenericData.Array<AResult> aresults = new GenericData.Array<AResult>(count, schema);
  if (results != null) {
    for (Result result : results) {
      aresults.add(resultToAResult(result));
    }
  }
  return aresults;
}
}
@Override
public GenericData.Array<?> map(Collection<?> input) {
  // Parse the schema lazily on first use.
  // NOTE(review): this lazy init is not thread-safe — fine if map() is only
  // called from a single thread; confirm against the caller.
  if (schema == null) {
    schema = new Schema.Parser().parse(jsonSchema);
  }
  // Parameterize the array (the original used the raw GenericData.Array type),
  // eliminating the unchecked-conversion warning.
  GenericData.Array<Object> array = new GenericData.Array<Object>(input.size(), schema);
  for (Object in : input) {
    array.add(mapFn.map(in));
  }
  return array;
}
}
@Override
public GenericData.Array<?> map(Collection<?> input) {
  // Parse the schema lazily on first use.
  // NOTE(review): this lazy init is not thread-safe — fine if map() is only
  // called from a single thread; confirm against the caller.
  if (schema == null) {
    schema = new Schema.Parser().parse(jsonSchema);
  }
  // Parameterize the array (the original used the raw GenericData.Array type),
  // eliminating the unchecked-conversion warning.
  GenericData.Array<Object> array = new GenericData.Array<Object>(input.size(), schema);
  for (Object in : input) {
    array.add(mapFn.map(in));
  }
  return array;
}
}
private Object createArray(Schema schema, Element el) { NodeList childNodes = el.getChildNodes(); Schema elementType = schema.getElementType(); int numElements = childNodes.getLength(); GenericData.Array array = new GenericData.Array(numElements, schema); for (int i = 0; i < numElements; i++) { Element child = (Element) childNodes.item(i); //noinspection unchecked array.add(createNodeDatum(elementType, child, true)); } return array; }
@Override public GenericData.Array<?> map(Collection<?> input) { if (schema == null) { schema = new Schema.Parser().parse(jsonSchema); } GenericData.Array array = new GenericData.Array(input.size(), schema); for (Object in : input) { array.add(mapFn.map(in)); } return array; } }
private GenericRecord generateRecordWithArrays() {
  // Schema: record "name" (namespace "test") with one string-array field "array1".
  String docString = "doc";
  Schema fieldSchema = Schema.createArray(Schema.create(Schema.Type.STRING));
  ArrayList<Schema.Field> fields = new ArrayList<Schema.Field>();
  fields.add(new Schema.Field("array1", fieldSchema, docString, null));
  Schema recordSchema = Schema.createRecord("name", docString, "test", false);
  recordSchema.setFields(fields);

  // Populate the array field with two sample strings.
  GenericData.Array<String> values = new GenericData.Array<>(2, fieldSchema);
  values.add("foobar");
  values.add("foobaz");

  GenericData.Record record = new GenericData.Record(recordSchema);
  record.put("array1", values);
  return record;
}
}