/**
 * Converts an HCatalog table schema into an equivalent Pig {@link ResourceSchema}.
 *
 * @param hcatSchema the HCatalog schema to convert
 * @return a ResourceSchema whose fields mirror the HCat fields, in order
 * @throws IOException if any field cannot be converted
 */
public static ResourceSchema getResourceSchema(HCatSchema hcatSchema) throws IOException {
    List<ResourceFieldSchema> fields = new ArrayList<ResourceFieldSchema>();
    // Translate each HCat field into its Pig counterpart, preserving order.
    for (HCatFieldSchema hcatField : hcatSchema.getFields()) {
        fields.add(getResourceSchemaFromFieldSchema(hcatField));
    }
    ResourceSchema resourceSchema = new ResourceSchema();
    resourceSchema.setFields(fields.toArray(new ResourceFieldSchema[0]));
    return resourceSchema;
}
private static ResourceSchema getTupleSubSchema(HCatFieldSchema hfs) throws IOException { // for each struct subfield, create equivalent ResourceFieldSchema ResourceSchema s = new ResourceSchema(); List<ResourceFieldSchema> lrfs = new ArrayList<ResourceFieldSchema>(); for (HCatFieldSchema subField : hfs.getStructSubSchema().getFields()) { lrfs.add(getResourceSchemaFromFieldSchema(subField)); } s.setFields(lrfs.toArray(new ResourceFieldSchema[lrfs.size()])); return s; }
/**
 * Captures the record writer and restores the resource schema that the
 * front-end serialized into the UDF properties. Falls back to an empty
 * schema (with a warning) when none was stored.
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
public void prepareToWrite(RecordWriter writer) throws IOException {
    this.writer = writer;
    // The schema is stashed under the ResourceSchema class name at planning time.
    String serialized = getUDFProperties().getProperty(ResourceSchema.class.getName());
    if (StringUtils.hasText(serialized)) {
        this.schema = IOUtils.deserializeFromBase64(serialized);
    } else {
        log.warn("No resource schema found; using an empty one....");
        this.schema = new ResourceSchema();
    }
    this.pigTuple = new PigTuple(schema);
}
/**
 * Parses a Pig schema string (e.g. {@code "name:chararray, age:int"}) into a
 * {@link ResourceSchema}.
 *
 * @param schema the textual Pig schema definition
 * @return the parsed ResourceSchema
 * @throws RuntimeException wrapping any parse failure
 */
private ResourceSchema createSchema(String schema) {
    try {
        return new ResourceSchema(Utils.getSchemaFromString(schema));
    } catch (Exception cause) {
        // Parsing happens at plan time; surface failures as unchecked with the cause attached.
        throw new RuntimeException(cause);
    }
}
/**
 * Turns a textual Pig schema into a {@link ResourceSchema}.
 *
 * @param schema Pig schema string as accepted by {@code Utils.getSchemaFromString}
 * @return the corresponding ResourceSchema
 * @throws RuntimeException with the original parse exception as its cause
 */
private ResourceSchema createSchema(String schema) {
    try {
        return new ResourceSchema(Utils.getSchemaFromString(schema));
    } catch (Exception parseError) {
        throw new RuntimeException(parseError);
    }
}
/**
 * Converts a Pig schema declaration string into a {@link ResourceSchema}.
 *
 * @param schema the schema declaration to parse
 * @return the resulting ResourceSchema
 * @throws RuntimeException if the declaration does not parse; the cause is preserved
 */
private ResourceSchema createSchema(String schema) {
    try {
        return new ResourceSchema(Utils.getSchemaFromString(schema));
    } catch (Exception e) {
        throw new RuntimeException(e);
    }
}
/**
 * Builds a {@link ResourceSchema} from its textual Pig representation.
 *
 * @param schema the Pig schema string
 * @return the parsed ResourceSchema
 * @throws RuntimeException wrapping the underlying parse exception
 */
private ResourceSchema createSchema(String schema) {
    try {
        return new ResourceSchema(Utils.getSchemaFromString(schema));
    } catch (Exception failure) {
        throw new RuntimeException(failure);
    }
}
bagSubFieldSchemas[0].setSchema(getTupleSubSchema(arrayElementFieldSchema)); } else if (arrayElementFieldSchema.getType() == Type.ARRAY) { ResourceSchema s = new ResourceSchema(); List<ResourceFieldSchema> lrfs = Arrays.asList(getResourceSchemaFromFieldSchema(arrayElementFieldSchema)); s.setFields(lrfs.toArray(new ResourceFieldSchema[lrfs.size()])); .setType(getPigType(arrayElementFieldSchema)) bagSubFieldSchemas[0].setSchema(new ResourceSchema().setFields(innerTupleFieldSchemas)); return new ResourceSchema().setFields(bagSubFieldSchemas);
public static ResourceSchema getResourceSchema(final Configuration configuration, Dependencies dependencies) throws IOException { final ResourceSchema schema = new ResourceSchema(); try { List<ColumnInfo> columns = null;
/**
 * Verifies that PhoenixPigSchemaUtil.getResourceSchema maps Phoenix column
 * metadata (ID, NAME) to the expected Pig ResourceSchema for SchemaType.TABLE.
 */
@Test
public void testSchema() throws SQLException, IOException {
    // Mock the Hadoop Configuration so the util sees SchemaType.TABLE.
    final Configuration configuration = mock(Configuration.class);
    when(configuration.get(PhoenixConfigurationUtil.SCHEMA_TYPE)).thenReturn(SchemaType.TABLE.name());
    // Stub the column-metadata lookup to return the two fixture columns
    // instead of touching a live Phoenix connection.
    final ResourceSchema actual = PhoenixPigSchemaUtil.getResourceSchema(
        configuration, new Dependencies() {
            List<ColumnInfo> getSelectColumnMetadataList(
                    Configuration configuration) throws SQLException {
                return Lists.newArrayList(ID_COLUMN, NAME_COLUMN);
            }
        });
    // expected schema.
    final ResourceFieldSchema[] fields = new ResourceFieldSchema[2];
    fields[0] = new ResourceFieldSchema().setName("ID")
        .setType(DataType.LONG);
    fields[1] = new ResourceFieldSchema().setName("NAME")
        .setType(DataType.CHARARRAY);
    final ResourceSchema expected = new ResourceSchema().setFields(fields);
    // Compare via toString: ResourceSchema's textual form captures names and types.
    assertEquals(expected.toString(), actual.toString());
}
/**
 * Reports the schema for the given location, initializing internal state first.
 *
 * @param location the data location being loaded
 * @param job the current job
 * @return the schema wrapped as a ResourceSchema, or null when none is known
 * @throws IOException if initialization fails
 */
@Override
public ResourceSchema getSchema(String location, Job job) throws IOException {
    init(location, job);
    if (schema == null) {
        return null;
    }
    return new ResourceSchema(schema);
}
/**
 * Maps an HCatalog schema onto a Pig {@link ResourceSchema}, field by field.
 *
 * @param hcatSchema the source HCatalog schema
 * @return a ResourceSchema with one converted field per HCat field, same order
 * @throws IOException if a field conversion fails
 */
public static ResourceSchema getResourceSchema(HCatSchema hcatSchema) throws IOException {
    List<HCatFieldSchema> hcatFields = hcatSchema.getFields();
    ResourceFieldSchema[] converted = new ResourceFieldSchema[hcatFields.size()];
    for (int i = 0; i < converted.length; i++) {
        converted[i] = getResourceSchemaFromFieldSchema(hcatFields.get(i));
    }
    ResourceSchema rSchema = new ResourceSchema();
    rSchema.setFields(converted);
    return rSchema;
}
/**
 * Produces the Pig {@link ResourceSchema} equivalent of an HCatalog schema.
 *
 * @param hcatSchema the HCatalog schema to translate
 * @return the translated ResourceSchema, fields in original order
 * @throws IOException if translation of any field fails
 */
public static ResourceSchema getResourceSchema(HCatSchema hcatSchema) throws IOException {
    List<ResourceFieldSchema> translated = new ArrayList<ResourceFieldSchema>();
    for (HCatFieldSchema sourceField : hcatSchema.getFields()) {
        ResourceFieldSchema pigField = getResourceSchemaFromFieldSchema(sourceField);
        translated.add(pigField);
    }
    ResourceSchema result = new ResourceSchema();
    result.setFields(translated.toArray(new ResourceFieldSchema[0]));
    return result;
}
/**
 * Returns the schema for the location after wiring up the input.
 *
 * @param location the input location
 * @param job the current job
 * @return the known schema wrapped as a ResourceSchema
 * @throws IOException if the input cannot be set up
 */
@Override
public ResourceSchema getSchema(String location, Job job) throws IOException {
    if (DEBUG) {
        LOG.debug("LoadMetadata.getSchema(" + location + ", " + job + ")");
    }
    // setInput populates the schema field used below.
    setInput(location, job);
    return new ResourceSchema(schema);
}
private static ResourceSchema getTupleSubSchema(HCatFieldSchema hfs) throws IOException { // for each struct subfield, create equivalent ResourceFieldSchema ResourceSchema s = new ResourceSchema(); List<ResourceFieldSchema> lrfs = new ArrayList<ResourceFieldSchema>(); for (HCatFieldSchema subField : hfs.getStructSubSchema().getFields()) { lrfs.add(getResourceSchemaFromFieldSchema(subField)); } s.setFields(lrfs.toArray(new ResourceFieldSchema[lrfs.size()])); return s; }
private static ResourceSchema getTupleSubSchema(HCatFieldSchema hfs) throws IOException { // for each struct subfield, create equivalent ResourceFieldSchema ResourceSchema s = new ResourceSchema(); List<ResourceFieldSchema> lrfs = new ArrayList<ResourceFieldSchema>(); for (HCatFieldSchema subField : hfs.getStructSubSchema().getFields()) { lrfs.add(getResourceSchemaFromFieldSchema(subField)); } s.setFields(lrfs.toArray(new ResourceFieldSchema[lrfs.size()])); return s; }
/**
 * Converts an Iceberg {@code Schema} into a Pig {@link ResourceSchema}.
 *
 * @param icebergSchema the Iceberg schema to convert
 * @return a ResourceSchema whose fields are the converted Iceberg columns
 * @throws IOException if column conversion fails
 */
public static ResourceSchema convert(Schema icebergSchema) throws IOException {
    // setFields returns the schema itself, so the conversion is a single expression.
    return new ResourceSchema().setFields(convertFields(icebergSchema.columns()));
}
@Override public ResourceSchema getSchema(String filename, Job job) throws IOException { // getSchema usually should only be called after setLocation, but it is not always enforced. if (job != null) { ThriftToPig.setConversionProperties(HadoopCompat.getConfiguration(job)); } return new ResourceSchema(ThriftToPig.toSchema(typeRef.getRawClass())); }
/**
 * Post-store hook: if the store function supports metadata and a schema is
 * known, persists that schema (with sort info) alongside the stored output.
 *
 * @param store the store operator that just completed
 * @param conf the job configuration
 * @throws IOException if storing the schema fails
 */
static public void storeCleanup(POStore store, Configuration conf) throws IOException {
    StoreFuncInterface storeFunc = store.getStoreFunc();
    // Only metadata-aware store functions can persist a schema.
    if (!(storeFunc instanceof StoreMetadata)) {
        return;
    }
    Schema schema = store.getSchema();
    if (schema == null) {
        return;
    }
    ((StoreMetadata) storeFunc).storeSchema(
        new ResourceSchema(schema, store.getSortInfo()),
        store.getSFile().getFileName(),
        new Job(conf));
}
/**
 * Reports a fixed schema: a single map-typed field named "data".
 *
 * @param arg0 the location (ignored — the schema is constant)
 * @param arg1 the job (ignored)
 * @return a ResourceSchema containing one MAP field called "data"
 * @throws IOException never thrown; declared to satisfy the interface
 */
@Override
public ResourceSchema getSchema(String arg0, Job arg1) throws IOException {
    FieldSchema dataField = new FieldSchema("data", DataType.MAP);
    return new ResourceSchema(new Schema(dataField));
}