/**
 * Extracts the Java-native value of a Hive primitive field and applies the
 * optional HCatalog compatibility conversions that are enabled via the
 * {@link HCatContext} configuration:
 * <ul>
 *   <li>{@code Boolean} -> {@code Integer} (1/0) when
 *       {@code HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER} is set;</li>
 *   <li>{@code Short}/{@code Byte} -> {@code Integer} when
 *       {@code HCAT_DATA_TINY_SMALL_INT_PROMOTION} is set.</li>
 * </ul>
 *
 * @param field                raw field value as produced by Hive; may be null
 * @param fieldObjectInspector inspector for the field; must be a
 *                             {@link PrimitiveObjectInspector}
 * @return the (possibly promoted) Java object, or null if {@code field} is null
 */
private static Object serializePrimitiveField(Object field, ObjectInspector fieldObjectInspector) {
  if (field == null) {
    return null;
  }
  Object f = ((PrimitiveObjectInspector) fieldObjectInspector).getPrimitiveJavaObject(field);
  // Conversions only apply when an HCatContext configuration is present.
  if (f != null && HCatContext.INSTANCE.getConf().isPresent()) {
    Configuration conf = HCatContext.INSTANCE.getConf().get();
    if (f instanceof Boolean && conf.getBoolean(
        HCatConstants.HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER,
        HCatConstants.HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER_DEFAULT)) {
      return ((Boolean) f) ? 1 : 0;
    } else if (f instanceof Short && conf.getBoolean(
        HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION,
        HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION_DEFAULT)) {
      // Integer.valueOf uses the boxed-value cache; new Integer(int) is deprecated.
      return Integer.valueOf(((Short) f).intValue());
    } else if (f instanceof Byte && conf.getBoolean(
        HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION,
        HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION_DEFAULT)) {
      return Integer.valueOf(((Byte) f).intValue());
    }
  }
  return f;
}
// Prepares the loader for the given input location. Forces tiny/small int
// promotion so byte/short Hive columns surface as Pig ints.
// NOTE(review): method body is truncated in this view — remainder not shown here.
@Override public void setLocation(String location, Job job) throws IOException { HCatContext.INSTANCE.setConf(job.getConfiguration()).getConf().get() .setBoolean(HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION, true);
@Override public ResourceSchema getSchema(String location, Job job) throws IOException { HCatContext.INSTANCE.setConf(job.getConfiguration()).getConf().get() .setBoolean(HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION, true); Table table = phutil.getTable(location, hcatServerUri != null ? hcatServerUri : PigHCatUtil.getHCatServerUri(job), PigHCatUtil.getHCatServerPrincipal(job), // Pass job to initialize metastore conf overrides for embedded metastore case // (hive.metastore.uris = ""). job); HCatSchema hcatTableSchema = HCatUtil.getTableSchemaWithPtnCols(table); try { PigHCatUtil.validateHCatTableSchemaFollowsPigRules(hcatTableSchema); } catch (IOException e) { throw new PigException( "Table schema incompatible for reading through HCatLoader :" + e.getMessage() + ";[Table schema was " + hcatTableSchema.toString() + "]" , PigHCatUtil.PIG_EXCEPTION_CODE, e); } storeInUDFContext(signature, HCatConstants.HCAT_TABLE_SCHEMA, hcatTableSchema); outputSchema = hcatTableSchema; return PigHCatUtil.getResourceSchema(hcatTableSchema); }
/**
 * Extracts the Java-native value of a Hive primitive field and applies the
 * optional HCatalog compatibility conversions that are enabled via the
 * {@link HCatContext} configuration:
 * <ul>
 *   <li>{@code Boolean} -> {@code Integer} (1/0) when
 *       {@code HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER} is set;</li>
 *   <li>{@code Short}/{@code Byte} -> {@code Integer} when
 *       {@code HCAT_DATA_TINY_SMALL_INT_PROMOTION} is set.</li>
 * </ul>
 *
 * @param field                raw field value as produced by Hive; may be null
 * @param fieldObjectInspector inspector for the field; must be a
 *                             {@link PrimitiveObjectInspector}
 * @return the (possibly promoted) Java object, or null if {@code field} is null
 */
private static Object serializePrimitiveField(Object field, ObjectInspector fieldObjectInspector) {
  if (field == null) {
    return null;
  }
  Object f = ((PrimitiveObjectInspector) fieldObjectInspector).getPrimitiveJavaObject(field);
  // Conversions only apply when an HCatContext configuration is present.
  if (f != null && HCatContext.INSTANCE.getConf().isPresent()) {
    Configuration conf = HCatContext.INSTANCE.getConf().get();
    if (f instanceof Boolean && conf.getBoolean(
        HCatConstants.HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER,
        HCatConstants.HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER_DEFAULT)) {
      return ((Boolean) f) ? 1 : 0;
    } else if (f instanceof Short && conf.getBoolean(
        HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION,
        HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION_DEFAULT)) {
      // Integer.valueOf uses the boxed-value cache; new Integer(int) is deprecated.
      return Integer.valueOf(((Short) f).intValue());
    } else if (f instanceof Byte && conf.getBoolean(
        HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION,
        HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION_DEFAULT)) {
      return Integer.valueOf(((Byte) f).intValue());
    }
  }
  return f;
}
/**
 * Extracts the Java-native value of a Hive primitive field and applies the
 * optional HCatalog compatibility conversions that are enabled via the
 * {@link HCatContext} configuration:
 * <ul>
 *   <li>{@code Boolean} -> {@code Integer} (1/0) when
 *       {@code HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER} is set;</li>
 *   <li>{@code Short}/{@code Byte} -> {@code Integer} when
 *       {@code HCAT_DATA_TINY_SMALL_INT_PROMOTION} is set.</li>
 * </ul>
 *
 * @param field                raw field value as produced by Hive; may be null
 * @param fieldObjectInspector inspector for the field; must be a
 *                             {@link PrimitiveObjectInspector}
 * @return the (possibly promoted) Java object, or null if {@code field} is null
 */
private static Object serializePrimitiveField(Object field, ObjectInspector fieldObjectInspector) {
  if (field == null) {
    return null;
  }
  Object f = ((PrimitiveObjectInspector) fieldObjectInspector).getPrimitiveJavaObject(field);
  // Conversions only apply when an HCatContext configuration is present.
  if (f != null && HCatContext.INSTANCE.getConf().isPresent()) {
    Configuration conf = HCatContext.INSTANCE.getConf().get();
    if (f instanceof Boolean && conf.getBoolean(
        HCatConstants.HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER,
        HCatConstants.HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER_DEFAULT)) {
      return ((Boolean) f) ? 1 : 0;
    } else if (f instanceof Short && conf.getBoolean(
        HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION,
        HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION_DEFAULT)) {
      // Integer.valueOf uses the boxed-value cache; new Integer(int) is deprecated.
      return Integer.valueOf(((Short) f).intValue());
    } else if (f instanceof Byte && conf.getBoolean(
        HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION,
        HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION_DEFAULT)) {
      return Integer.valueOf(((Byte) f).intValue());
    }
  }
  return f;
}
/**
 * Extracts the Java-native value of a Hive primitive field and applies the
 * optional HCatalog compatibility conversions that are enabled via the
 * {@link HCatContext} configuration:
 * <ul>
 *   <li>{@code Boolean} -> {@code Integer} (1/0) when
 *       {@code HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER} is set;</li>
 *   <li>{@code Short}/{@code Byte} -> {@code Integer} when
 *       {@code HCAT_DATA_TINY_SMALL_INT_PROMOTION} is set.</li>
 * </ul>
 *
 * @param field                raw field value as produced by Hive; may be null
 * @param fieldObjectInspector inspector for the field; must be a
 *                             {@link PrimitiveObjectInspector}
 * @return the (possibly promoted) Java object, or null if {@code field} is null
 */
private static Object serializePrimitiveField(Object field, ObjectInspector fieldObjectInspector) {
  if (field == null) {
    return null;
  }
  Object f = ((PrimitiveObjectInspector) fieldObjectInspector).getPrimitiveJavaObject(field);
  // Conversions only apply when an HCatContext configuration is present.
  if (f != null && HCatContext.INSTANCE.getConf().isPresent()) {
    Configuration conf = HCatContext.INSTANCE.getConf().get();
    if (f instanceof Boolean && conf.getBoolean(
        HCatConstants.HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER,
        HCatConstants.HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER_DEFAULT)) {
      return ((Boolean) f) ? 1 : 0;
    } else if (f instanceof Short && conf.getBoolean(
        HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION,
        HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION_DEFAULT)) {
      // Integer.valueOf uses the boxed-value cache; new Integer(int) is deprecated.
      return Integer.valueOf(((Short) f).intValue());
    } else if (f instanceof Byte && conf.getBoolean(
        HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION,
        HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION_DEFAULT)) {
      return Integer.valueOf(((Byte) f).intValue());
    }
  }
  return f;
}
/**
 * Extracts the Java-native value of a Hive primitive field and applies the
 * optional HCatalog compatibility conversions that are enabled via the
 * {@link HCatContext} configuration:
 * <ul>
 *   <li>{@code Boolean} -> {@code Integer} (1/0) when
 *       {@code HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER} is set;</li>
 *   <li>{@code Short}/{@code Byte} -> {@code Integer} when
 *       {@code HCAT_DATA_TINY_SMALL_INT_PROMOTION} is set.</li>
 * </ul>
 *
 * @param field                raw field value as produced by Hive; may be null
 * @param fieldObjectInspector inspector for the field; must be a
 *                             {@link PrimitiveObjectInspector}
 * @return the (possibly promoted) Java object, or null if {@code field} is null
 */
private static Object serializePrimitiveField(Object field, ObjectInspector fieldObjectInspector) {
  if (field == null) {
    return null;
  }
  Object f = ((PrimitiveObjectInspector) fieldObjectInspector).getPrimitiveJavaObject(field);
  // Conversions only apply when an HCatContext configuration is present.
  if (f != null && HCatContext.INSTANCE.getConf().isPresent()) {
    Configuration conf = HCatContext.INSTANCE.getConf().get();
    if (f instanceof Boolean && conf.getBoolean(
        HCatConstants.HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER,
        HCatConstants.HCAT_DATA_CONVERT_BOOLEAN_TO_INTEGER_DEFAULT)) {
      return ((Boolean) f) ? 1 : 0;
    } else if (f instanceof Short && conf.getBoolean(
        HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION,
        HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION_DEFAULT)) {
      // Integer.valueOf uses the boxed-value cache; new Integer(int) is deprecated.
      return Integer.valueOf(((Short) f).intValue());
    } else if (f instanceof Byte && conf.getBoolean(
        HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION,
        HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION_DEFAULT)) {
      return Integer.valueOf(((Byte) f).intValue());
    }
  }
  return f;
}
// Prepares the loader for the given input location. Forces tiny/small int
// promotion so byte/short Hive columns surface as Pig ints, then fetches the
// UDF context for per-signature state.
// NOTE(review): method body is truncated in this view — remainder not shown here.
@Override public void setLocation(String location, Job job) throws IOException { HCatContext.INSTANCE.setConf(job.getConfiguration()).getConf().get() .setBoolean(HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION, true); UDFContext udfContext = UDFContext.getUDFContext();
@Override public ResourceSchema getSchema(String location, Job job) throws IOException { HCatContext.INSTANCE.setConf(job.getConfiguration()).getConf().get() .setBoolean(HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION, true); Table table = phutil.getTable(location, hcatServerUri != null ? hcatServerUri : PigHCatUtil.getHCatServerUri(job), PigHCatUtil.getHCatServerPrincipal(job), // Pass job to initialize metastore conf overrides for embedded metastore case // (hive.metastore.uris = ""). job); HCatSchema hcatTableSchema = HCatUtil.getTableSchemaWithPtnCols(table); try { PigHCatUtil.validateHCatTableSchemaFollowsPigRules(hcatTableSchema); } catch (IOException e) { throw new PigException( "Table schema incompatible for reading through HCatLoader :" + e.getMessage() + ";[Table schema was " + hcatTableSchema.toString() + "]" , PigHCatUtil.PIG_EXCEPTION_CODE, e); } storeInUDFContext(signature, HCatConstants.HCAT_TABLE_SCHEMA, hcatTableSchema); outputSchema = hcatTableSchema; return PigHCatUtil.getResourceSchema(hcatTableSchema); }
// Prepares the loader for the given input location. Forces tiny/small int
// promotion so byte/short Hive columns surface as Pig ints.
// NOTE(review): method body is truncated in this view — remainder not shown here.
@Override public void setLocation(String location, Job job) throws IOException { HCatContext.INSTANCE.setConf(job.getConfiguration()).getConf().get() .setBoolean(HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION, true);
// Prepares the loader for the given input location. Forces tiny/small int
// promotion so byte/short Hive columns surface as Pig ints.
// NOTE(review): method body is truncated in this view — remainder not shown here.
@Override public void setLocation(String location, Job job) throws IOException { HCatContext.INSTANCE.setConf(job.getConfiguration()).getConf().get() .setBoolean(HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION, true);
@Override public ResourceSchema getSchema(String location, Job job) throws IOException { HCatContext.INSTANCE.setConf(job.getConfiguration()).getConf().get() .setBoolean(HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION, true); Table table = phutil.getTable(location, hcatServerUri != null ? hcatServerUri : PigHCatUtil.getHCatServerUri(job), PigHCatUtil.getHCatServerPrincipal(job), // Pass job to initialize metastore conf overrides for embedded metastore case // (hive.metastore.uris = ""). job); HCatSchema hcatTableSchema = HCatUtil.getTableSchemaWithPtnCols(table); try { PigHCatUtil.validateHCatTableSchemaFollowsPigRules(hcatTableSchema); } catch (IOException e) { throw new PigException( "Table schema incompatible for reading through HCatLoader :" + e.getMessage() + ";[Table schema was " + hcatTableSchema.toString() + "]" , PigHCatUtil.PIG_EXCEPTION_CODE, e); } storeInUDFContext(signature, HCatConstants.HCAT_TABLE_SCHEMA, hcatTableSchema); outputSchema = hcatTableSchema; return PigHCatUtil.getResourceSchema(hcatTableSchema); }
@Override public ResourceSchema getSchema(String location, Job job) throws IOException { HCatContext.INSTANCE.setConf(job.getConfiguration()).getConf().get() .setBoolean(HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION, true); Table table = phutil.getTable(location, hcatServerUri != null ? hcatServerUri : PigHCatUtil.getHCatServerUri(job), PigHCatUtil.getHCatServerPrincipal(job), // Pass job to initialize metastore conf overrides for embedded metastore case // (hive.metastore.uris = ""). job); HCatSchema hcatTableSchema = HCatUtil.getTableSchemaWithPtnCols(table); try { PigHCatUtil.validateHCatTableSchemaFollowsPigRules(hcatTableSchema); } catch (IOException e) { throw new PigException( "Table schema incompatible for reading through HCatLoader :" + e.getMessage() + ";[Table schema was " + hcatTableSchema.toString() + "]" , PigHCatUtil.PIG_EXCEPTION_CODE, e); } storeInUDFContext(signature, HCatConstants.HCAT_TABLE_SCHEMA, hcatTableSchema); outputSchema = hcatTableSchema; return PigHCatUtil.getResourceSchema(hcatTableSchema); }