for (int i = 0; i < fields.length && !foundField; i++) { ResourceFieldSchema field = fields[i]; if (fieldName.equals(field.getName())) { foundField = true; byte type = field.getType();
String name = nestedFields[i].getName();
/**
 * Joins all field names of the given schema into a comma-separated string,
 * e.g. {@code "a,b,c"}. Returns an empty string when the schema has no fields.
 *
 * NOTE(review): the method name keeps the historical "Reqired" typo because
 * callers reference it by this name; renaming would break them.
 *
 * @param schema the resource schema whose field names are joined
 * @return comma-separated field names with no trailing comma
 */
private String getReqiredColumnNamesString(ResourceSchema schema) {
    StringBuilder sb = new StringBuilder();
    for (ResourceFieldSchema field : schema.getFields()) {
        sb.append(field.getName()).append(",");
    }
    // Guard against an empty schema: calling charAt(-1) on an empty builder
    // would throw StringIndexOutOfBoundsException.
    int last = sb.length() - 1;
    if (last >= 0 && sb.charAt(last) == ',') {
        sb.deleteCharAt(last);
    }
    return sb.toString();
}
} catch (NumberFormatException nfe1) { throw new NumberFormatException("Error while trying to parse " + val + " into an Integer for field [fieldindex= " + fieldIndex + "] " + field.getName() + "\n" + value.toString()); tuple.set(fieldIndex, Long.parseLong(val)); } catch (NumberFormatException nfe2) { throw new NumberFormatException("Error while trying to parse " + val + " into a Long for field " + field.getName() + "\n" + value.toString()); tuple.set(fieldIndex, new BigDecimal(val)); } catch (NumberFormatException nfe2) { throw new NumberFormatException("Error while trying to parse " + val + " into a BigDecimal for field " + field.getName() + "\n" + value.toString()); if ("event_list".equals(field.getName())) { DataBag bag = bagFactory.newDefaultBag(); String[] events = val.split(","); throw new IOException("Can not process bags for the field " + field.getName() + ". Can only process for the event_list field.");
String name = nestedFields[i].getName();
for (ResourceFieldSchema subFs : fs.getSchema().getFields()) { TypeInfo info = getTypeInfo(subFs); names.add(subFs.getName()); typeInfos.add(info);
json.writeNullField(field.getName()); return; json.writeBooleanField(field.getName(), (Boolean)d); return; json.writeNumberField(field.getName(), (Integer)d); return; json.writeNumberField(field.getName(), (Long)d); return; json.writeNumberField(field.getName(), (Float)d); return; json.writeNumberField(field.getName(), (Double)d); return; json.writeStringField(field.getName(), d.toString()); return; json.writeStringField(field.getName(), d.toString()); return; json.writeStringField(field.getName(), (String)d); return; json.writeFieldName(field.getName()); json.writeNumber((BigInteger)d);
for (int i = 0; i < fields.length && !foundField; i++) { ResourceFieldSchema field = fields[i]; if (fieldName.equals(field.getName())) { foundField = true; byte type = field.getType();
Integer i = 0; for (ResourceSchema.ResourceFieldSchema rfs : rs.getFields()) { String rfsName = toAvroName(rfs.getName(), doubleColonsToDoubleUnderscores); Schema fieldSchema = resourceFieldSchemaToAvroSchema(
String fieldName = fieldSchema.getName(); byte dataTypeId = fieldSchema.getType();
if (rfs1[i].getName()==null && rfs2[i].getName()!=null || rfs1[i].getName()!=null && rfs2[i].getName()==null) return false; if (rfs1[i].getName()==null && rfs2[i].getName()==null) { if (rfs1[i].getType() == rfs2[i].getType()) return true; return false; if (!rfs1[i].getName().equals(rfs2[i].getName()) || rfs1[i].getType() != rfs2[i].getType()) { return false;
@Override public List<String> getPredicateFields(String location, Job job) throws IOException { ResourceSchema schema = getSchema(location, job); List<String> predicateFields = new ArrayList<String>(); for (ResourceFieldSchema field : schema.getFields()) { switch(field.getType()) { case DataType.BOOLEAN: case DataType.INTEGER: case DataType.LONG: case DataType.FLOAT: case DataType.DOUBLE: case DataType.DATETIME: case DataType.CHARARRAY: case DataType.BIGINTEGER: case DataType.BIGDECIMAL: predicateFields.add(field.getName()); break; default: // Skip DataType.BYTEARRAY, DataType.TUPLE, DataType.MAP and DataType.BAG break; } } return predicateFields; }
@Override public void checkSchema(ResourceSchema s) throws IOException { if (myUDFContextSignature == null) { throw new IllegalStateException("No UDFContext Signature provided to this UDF! Cannot store field names!"); } ResourceSchema.ResourceFieldSchema[] fields = s.getFields(); if (fields == null || fields.length == 0) { throw new IOException("Input field names not available from schema during front-end processing! FusionIndexPipelineStoreFunc must have field names!"); } List<String> fieldNames = new ArrayList<String>(fields.length); for (int f = 0; f < fields.length; f++) { fieldNames.add(fields[f].getName()); } // Save the fieldIndexToType Mapping in the UDFContext, keyed by our // UDFContext Signature so we don't step on other FusionIndexPipelineStoreFunc UDFs Properties udfProps = UDFContext.getUDFContext().getUDFProperties(getClass(), new String[]{myUDFContextSignature}); udfProps.put(FIELD_NAMES_FROM_SCHEMA_PROPS_KEY, fieldNames); log.info(String.format("Saved %s=%s into UDFContext using signature: %s", FIELD_NAMES_FROM_SCHEMA_PROPS_KEY, String.valueOf(fieldNames), myUDFContextSignature)); }
public static Schema getPigSchema(ResourceSchema rSchema) throws FrontendException { if(rSchema == null) { return null; } List<FieldSchema> fsList = new ArrayList<FieldSchema>(); for(ResourceFieldSchema rfs : rSchema.getFields()) { FieldSchema fs = new FieldSchema(rfs.getName(), rfs.getSchema() == null ? null : getPigSchema(rfs.getSchema()), rfs.getType()); if(rfs.getType() == DataType.BAG) { if (fs.schema != null) { // allow partial schema if (fs.schema.size() == 1) { FieldSchema innerFs = fs.schema.getField(0); if (innerFs.type != DataType.TUPLE) { ResourceFieldSchema.throwInvalidSchemaException(); } } else { ResourceFieldSchema.throwInvalidSchemaException(); } } } fsList.add(fs); } return new Schema(fsList); }
/**
 * Returns the names of all fields in this schema, in declaration order.
 *
 * @return a freshly allocated array of field names, one per field
 */
public String[] fieldNames() {
    String[] result = new String[fields.length];
    int idx = 0;
    while (idx < result.length) {
        result[idx] = fields[idx].getName();
        idx++;
    }
    return result;
}