@Override
public void checkSchema(ResourceSchema resourceSchema) throws IOException {
  /*
   * The schema provided by the user in the store statement and the schema
   * computed by Pig at the time of calling store must match.
   */
  Schema runtimeSchema = Schema.getPigSchema(resourceSchema);
  if (pigSchema != null) {
    if (!Schema.equals(runtimeSchema, pigSchema, false, true)) {
      // Fixed: the original concatenation was missing a space and rendered
      // as "...the Schemareturned by Pig run-time...".
      throw new FrontendException("Schema provided in store statement doesn't match with the Schema"
        + " returned by Pig run-time. Schema provided in HCatStorer: " + pigSchema.toString()
        + " Schema received from Pig runtime: " + runtimeSchema.toString(),
        PigHCatUtil.PIG_EXCEPTION_CODE);
    }
  } else {
    pigSchema = runtimeSchema;
  }
  // Stash the accepted schema in the UDF context (keyed by this storer's
  // signature) so the task-side instance can recover it later.
  UDFContext.getUDFContext().getUDFProperties(this.getClass(), new String[]{sign})
    .setProperty(PIG_SCHEMA, ObjectSerializer.serialize(pigSchema));
}
@Override
public void storeSchema(ResourceSchema schema, String location, Job job) throws IOException {
  // Ensure the backing store for this location exists, then record the
  // schema so a subsequent load of the same location can retrieve it.
  init(location, job);
  Schema pigSchema = Schema.getPigSchema(schema);
  data.setSchema(location, pigSchema);
}
@Override
public void checkStoreSchema(ResourceFieldSchema schema) throws IOException {
  // The outer field must be present and must wrap a tuple.
  Preconditions.checkNotNull(schema, "Schema is null");
  byte foundType = schema.getType();
  Preconditions.checkArgument(DataType.TUPLE == foundType,
      "Expected schema type '%s' but found type '%s'",
      DataType.findTypeName(DataType.TUPLE), DataType.findTypeName(foundType));
  // The tuple's child schema carries the actual column layout.
  ResourceSchema childSchema = schema.getSchema();
  Preconditions.checkNotNull(childSchema, "Child schema is null");
  // Compare with the configured schema using the same relaxed flags as
  // elsewhere (aliases not required to match, types must be compatible).
  Schema actualSchema = Schema.getPigSchema(childSchema);
  Preconditions.checkArgument(Schema.equals(expectedSchema, actualSchema, false, true),
      "Expected store schema '%s' but found schema '%s'", expectedSchema, actualSchema);
}
@Override
public void checkStoreSchema(ResourceFieldSchema schema) throws IOException {
  // Reject a null outer field or anything that is not a tuple wrapper.
  Preconditions.checkNotNull(schema, "Schema is null");
  byte foundType = schema.getType();
  Preconditions.checkArgument(DataType.TUPLE == foundType,
      "Expected schema type '%s' but found type '%s'",
      DataType.findTypeName(DataType.TUPLE), DataType.findTypeName(foundType));
  // Pull out the inner schema describing the tuple's columns.
  ResourceSchema childSchema = schema.getSchema();
  Preconditions.checkNotNull(childSchema, "Child schema is null");
  // Verify the incoming layout is compatible with what this storer expects
  // (alias names relaxed, types enforced — see Schema.equals flags).
  Schema actualSchema = Schema.getPigSchema(childSchema);
  Preconditions.checkArgument(Schema.equals(expectedSchema, actualSchema, false, true),
      "Expected store schema '%s' but found schema '%s'", expectedSchema, actualSchema);
}
private LogicalSchema getSchemaFromMetaData() throws FrontendException {
  // Ask the loader for its schema when it supports metadata. The instanceof
  // check already handles null, so the original separate null test was
  // redundant; we also use the getter consistently instead of mixing it
  // with the 'loadFunc' field as the original code did.
  LoadFunc func = getLoadFunc();
  if (func instanceof LoadMetadata) {
    try {
      ResourceSchema resourceSchema =
          ((LoadMetadata) func).getSchema(getFileSpec().getFileName(), new Job(conf));
      Schema oldSchema = Schema.getPigSchema(resourceSchema);
      return Util.translateSchema(oldSchema);
    } catch (IOException e) {
      throw new FrontendException(this,
          "Cannot get schema from loadFunc " + func.getClass().getName(), 2245, e);
    }
  }
  return null;
}
@Override
public Schema outputSchema(Schema input) {
  try {
    // Lazily resolve the UDAF's output type on first use.
    if (!inited) {
      schemaAndEvaluatorInfo.init(getInputSchema(), instantiateUDAF(funcName), Mode.COMPLETE, constantsInfo);
      inited = true;
    }
    // Wrap the single Hive output type in a one-field resource schema and
    // convert that into a Pig schema.
    ResourceFieldSchema field = HiveUtils.getResourceFieldSchema(schemaAndEvaluatorInfo.outputTypeInfo);
    ResourceSchema wrapper = new ResourceSchema();
    wrapper.setFields(new ResourceFieldSchema[] {field});
    return Schema.getPigSchema(wrapper);
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
}
@Override
public Schema outputSchema(Schema input) {
  try {
    // Initialize the Hive schema info once, the first time Pig asks.
    if (!inited) {
      schemaInfo.init(getInputSchema(), evalUDF, constantsInfo);
      inited = true;
    }
    // Derive the Pig field from the UDF's output object inspector, then
    // wrap it as a single-field schema.
    ResourceFieldSchema field = HiveUtils.getResourceFieldSchema(
        TypeInfoUtils.getTypeInfoFromObjectInspector(schemaInfo.outputObjectInspector));
    ResourceSchema wrapper = new ResourceSchema();
    wrapper.setFields(new ResourceFieldSchema[] {field});
    return Schema.getPigSchema(wrapper);
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
@Override
public void checkSchema(ResourceSchema resourceSchema) throws IOException {
  /*
   * The schema provided by the user in the store statement and the schema
   * computed by Pig at the time of calling store must match.
   */
  Schema runtimeSchema = Schema.getPigSchema(resourceSchema);
  if (pigSchema != null) {
    if (!Schema.equals(runtimeSchema, pigSchema, false, true)) {
      // Fixed: the original concatenation was missing a space and rendered
      // as "...the Schemareturned by Pig run-time...".
      throw new FrontendException("Schema provided in store statement doesn't match with the Schema"
        + " returned by Pig run-time. Schema provided in HCatStorer: " + pigSchema.toString()
        + " Schema received from Pig runtime: " + runtimeSchema.toString(),
        PigHCatUtil.PIG_EXCEPTION_CODE);
    }
  } else {
    pigSchema = runtimeSchema;
  }
  // Stash the accepted schema in the UDF context (keyed by this storer's
  // signature) so the task-side instance can recover it later.
  UDFContext.getUDFContext().getUDFProperties(this.getClass(), new String[]{sign})
    .setProperty(PIG_SCHEMA, ObjectSerializer.serialize(pigSchema));
}
@Override
public void checkSchema(ResourceSchema resourceSchema) throws IOException {
  /*
   * The schema provided by the user in the store statement and the schema
   * computed by Pig at the time of calling store must match.
   */
  Schema runtimeSchema = Schema.getPigSchema(resourceSchema);
  if (pigSchema != null) {
    if (!Schema.equals(runtimeSchema, pigSchema, false, true)) {
      // Fixed: the original concatenation was missing a space and rendered
      // as "...the Schemareturned by Pig run-time...".
      throw new FrontendException("Schema provided in store statement doesn't match with the Schema"
        + " returned by Pig run-time. Schema provided in HCatStorer: " + pigSchema.toString()
        + " Schema received from Pig runtime: " + runtimeSchema.toString(),
        PigHCatUtil.PIG_EXCEPTION_CODE);
    }
  } else {
    pigSchema = runtimeSchema;
  }
  // Stash the accepted schema in the UDF context (keyed by this storer's
  // signature) so the task-side instance can recover it later.
  UDFContext.getUDFContext().getUDFProperties(this.getClass(), new String[]{sign})
    .setProperty(PIG_SCHEMA, ObjectSerializer.serialize(pigSchema));
}
return(Schema.getPigSchema((ResourceSchema) newSchema));
public static Schema getPigSchema(ResourceSchema rSchema) throws FrontendException { if(rSchema == null) { return null; } List<FieldSchema> fsList = new ArrayList<FieldSchema>(); for(ResourceFieldSchema rfs : rSchema.getFields()) { FieldSchema fs = new FieldSchema(rfs.getName(), rfs.getSchema() == null ? null : getPigSchema(rfs.getSchema()), rfs.getType()); if(rfs.getType() == DataType.BAG) { if (fs.schema != null) { // allow partial schema if (fs.schema.size() == 1) { FieldSchema innerFs = fs.schema.getField(0); if (innerFs.type != DataType.TUPLE) { ResourceFieldSchema.throwInvalidSchemaException(); } } else { ResourceFieldSchema.throwInvalidSchemaException(); } } } fsList.add(fs); } return new Schema(fsList); }
@Override
public Schema outputSchema(Schema input) {
  try {
    // Initialize the UDTF schema info on first use.
    if (!inited) {
      schemaInfo.init(getInputSchema(), udtf, constantsInfo);
      inited = true;
    }
    // Build the tuple schema from the UDTF's output inspector...
    ResourceFieldSchema field = HiveUtils.getResourceFieldSchema(
        TypeInfoUtils.getTypeInfoFromObjectInspector(schemaInfo.outputObjectInspector));
    ResourceSchema tupleSchema = new ResourceSchema();
    tupleSchema.setFields(new ResourceFieldSchema[] {field});
    // ...then wrap it in a bag, since a UDTF emits zero or more rows.
    ResourceFieldSchema bagField = new ResourceFieldSchema();
    bagField.setType(DataType.BAG);
    bagField.setSchema(tupleSchema);
    ResourceSchema bagSchema = new ResourceSchema();
    bagSchema.setFields(new ResourceFieldSchema[] {bagField});
    return Schema.getPigSchema(bagSchema);
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
if (logSchema != null) { try { schema = Schema.getPigSchema(new ResourceSchema(logSchema)); } catch (FrontendException e) { throw new RuntimeException("LogicalSchema in foreach unable to be converted to Schema: " + logSchema, e);
continue; Schema toGen = Schema.getPigSchema(new ResourceSchema(logicalSchema)); Schema leftSchema = null; if (logicalSchema != null) { leftSchema = Schema.getPigSchema(new ResourceSchema(logicalSchema)); rightSchema = Schema.getPigSchema(new ResourceSchema(logicalSchema)); mergedSchema = Schema.getPigSchema(new ResourceSchema(logicalSchema));