static public Pair<String, String> getDBTableNames(String location) throws IOException { // the location string will be of the form: // <database name>.<table name> - parse it and // communicate the information to HCatInputFormat try { return HCatUtil.getDbAndTableName(location); } catch (IOException e) { String locationErrMsg = "The input location in load statement " + "should be of the form " + "<databasename>.<table name> or <table name>. Got " + location; throw new PigException(locationErrMsg, PIG_EXCEPTION_CODE); } }
// NOTE(review): fragment of a larger method not visible here — rejects an
// HCatalog column type that has no Pig column-type mapping; 'type' is
// presumably the HCat type enum value being converted. Verify against caller.
throw new PigException("HCatalog column type '" + type.toString() + "' is not supported in Pig as a column type", PIG_EXCEPTION_CODE);
public Table getTable(String location, String hcatServerUri, String hcatServerPrincipal, Job job) throws IOException { Pair<String, String> loc_server = new Pair<String, String>(location, hcatServerUri); Table hcatTable = hcatTableCache.get(loc_server); if (hcatTable != null) { return hcatTable; } Pair<String, String> dbTablePair = PigHCatUtil.getDBTableNames(location); String dbName = dbTablePair.first; String tableName = dbTablePair.second; Table table = null; IMetaStoreClient client = null; try { client = getHiveMetaClient(hcatServerUri, hcatServerPrincipal, PigHCatUtil.class, job); table = HCatUtil.getTable(client, dbName, tableName); } catch (NoSuchObjectException nsoe) { throw new PigException("Table not found : " + nsoe.getMessage(), PIG_EXCEPTION_CODE); // prettier error messages to frontend } catch (Exception e) { throw new IOException(e); } finally { HCatUtil.closeHiveClientQuietly(client); } hcatTableCache.put(loc_server, table); return table; }
// NOTE(review): fragment of a larger method not visible here — re-wraps an
// HCat exception ('he') for the Pig frontend; the original exception is
// correctly preserved as the cause.
throw new PigException(he.getMessage(), PigHCatUtil.PIG_EXCEPTION_CODE, he);
/**
 * Returns the Pig {@link ResourceSchema} for the HCat table at {@code location},
 * including partition columns, and caches the HCat schema in the UDFContext so
 * backend tasks can retrieve it without a metastore round trip.
 *
 * @param location load-statement location ({@code <db>.<table>} or {@code <table>})
 * @param job      current job; supplies configuration and metastore overrides
 * @throws IOException if the table cannot be resolved or its schema is
 *                     incompatible with Pig
 */
@Override
public ResourceSchema getSchema(String location, Job job) throws IOException {
    // Promote HCat tinyint/smallint to Pig int (Pig has no byte/short column types).
    HCatContext.INSTANCE.setConf(job.getConfiguration()).getConf().get()
        .setBoolean(HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION, true);

    Table table = phutil.getTable(location,
        hcatServerUri != null ? hcatServerUri : PigHCatUtil.getHCatServerUri(job),
        PigHCatUtil.getHCatServerPrincipal(job),
        // Pass job to initialize metastore conf overrides for embedded metastore case
        // (hive.metastore.uris = "").
        job);
    HCatSchema hcatTableSchema = HCatUtil.getTableSchemaWithPtnCols(table);
    try {
        // Reject schemas Pig cannot represent before any data is read.
        PigHCatUtil.validateHCatTableSchemaFollowsPigRules(hcatTableSchema);
    } catch (IOException e) {
        throw new PigException(
            "Table schema incompatible for reading through HCatLoader :" + e.getMessage()
                + ";[Table schema was " + hcatTableSchema.toString() + "]"
            , PigHCatUtil.PIG_EXCEPTION_CODE, e);
    }
    // Stash the schema under this loader's signature for use on the backend.
    storeInUDFContext(signature, HCatConstants.HCAT_TABLE_SCHEMA, hcatTableSchema);
    outputSchema = hcatTableSchema;
    return PigHCatUtil.getResourceSchema(hcatTableSchema);
}
/**
 * Validates that a single HCat field is representable in Pig, recursing into
 * the element/value schemas of complex types.
 *
 * @param hcatField the HCat field to validate
 * @throws PigException if the field's type cannot be represented in Pig
 */
private static void validateHcatFieldFollowsPigRules(HCatFieldSchema hcatField) throws PigException {
    try {
        Type hType = hcatField.getType();
        switch (hType) {
        case BOOLEAN:
            // Boolean columns are only valid when this Pig version supports them.
            if (!pigHasBooleanSupport) {
                throw new PigException("Incompatible type found in HCat table schema: " + hcatField, PigHCatUtil.PIG_EXCEPTION_CODE);
            }
            break;
        case ARRAY:
            // Recurse into the array's element schema.
            validateHCatSchemaFollowsPigRules(hcatField.getArrayElementSchema());
            break;
        case STRUCT:
            // Recurse into the struct's member schema.
            validateHCatSchemaFollowsPigRules(hcatField.getStructSubSchema());
            break;
        case MAP:
            // key is only string
            // Non-string map keys are not rejected; they are converted to String
            // at read time, so just log the conversion here.
            if (hcatField.getMapKeyType() != Type.STRING) {
                LOG.info("Converting non-String key of map " + hcatField.getName() + " from "
                    + hcatField.getMapKeyType() + " to String.");
            }
            validateHCatSchemaFollowsPigRules(hcatField.getMapValueSchema());
            break;
        }
        // Other primitive types fall through: they are always representable.
    } catch (HCatException e) {
        throw new PigException("Incompatible type found in hcat table schema: " + hcatField, PigHCatUtil.PIG_EXCEPTION_CODE, e);
    }
}
/**
 * Logs a usage message for a malformed load location and aborts by throwing.
 *
 * @param location the offending location string from the load statement
 * @throws PigException always, carrying the usage message
 */
private void printUsage(final String location) throws PigException {
    final String message = String.format(
        "The input location in load statement should be of the form "
            + "%s<table name> or %s<query>. Got [%s] ",
        PHOENIX_TABLE_NAME_SCHEME, PHOENIX_QUERY_SCHEME, location);
    LOG.error(message);
    throw new PigException(message);
}
final String errorMsg = String.format(" Error transforming PhoenixRecord to Tuple [%s] ", ex.getMessage()); LOG.error(errorMsg); throw new PigException(errorMsg);
/**
 * Logs a usage message for a malformed load location and aborts by throwing.
 *
 * @param location the offending location string from the load statement
 * @throws PigException always, carrying the usage message
 */
private void printUsage(final String location) throws PigException {
    final String message = String.format(
        "The input location in load statement should be of the form "
            + "%s<table name> or %s<query>. Got [%s] ",
        PHOENIX_TABLE_NAME_SCHEME, PHOENIX_QUERY_SCHEME, location);
    LOG.error(message);
    throw new PigException(message);
}
static public Pair<String, String> getDBTableNames(String location) throws IOException { // the location string will be of the form: // <database name>.<table name> - parse it and // communicate the information to HCatInputFormat try { return HCatUtil.getDbAndTableName(location); } catch (IOException e) { String locationErrMsg = "The input location in load statement " + "should be of the form " + "<databasename>.<table name> or <table name>. Got " + location; throw new PigException(locationErrMsg, PIG_EXCEPTION_CODE); } }
/**
 * Executes a Pig Latin script fragment by registering it with the embedded
 * {@code pigServer}.
 *
 * @param script Pig Latin text to register
 * @throws PigException wrapping any IOException raised during registration
 */
public void run(String script) throws PigException {
    try {
        pigServer.registerQuery(script);
    } catch (IOException cause) {
        throw new PigException(cause);
    }
}
}
static public Pair<String, String> getDBTableNames(String location) throws IOException { // the location string will be of the form: // <database name>.<table name> - parse it and // communicate the information to HCatInputFormat try { return HCatUtil.getDbAndTableName(location); } catch (IOException e) { String locationErrMsg = "The input location in load statement " + "should be of the form " + "<databasename>.<table name> or <table name>. Got " + location; throw new PigException(locationErrMsg, PIG_EXCEPTION_CODE); } }
static public Pair<String, String> getDBTableNames(String location) throws IOException { // the location string will be of the form: // <database name>.<table name> - parse it and // communicate the information to HCatInputFormat try { return HCatUtil.getDbAndTableName(location); } catch (IOException e) { String locationErrMsg = "The input location in load statement " + "should be of the form " + "<databasename>.<table name> or <table name>. Got " + location; throw new PigException(locationErrMsg, PIG_EXCEPTION_CODE); } }
/**
 * Selects the first ServiceLoader-discovered {@link ExecType} that accepts the
 * given properties.
 *
 * @param properties configuration used by each candidate's {@code accepts} check
 * @return the singleton instance of the matching ExecType
 * @throws PigException (error code 2040) if no registered ExecType accepts
 */
public static ExecType selectExecType(Properties properties) throws PigException {
    for (ExecType candidate : ServiceLoader.load(ExecType.class)) {
        log.info("Trying ExecType : " + candidate);
        if (!candidate.accepts(properties)) {
            log.debug("Cannot pick " + candidate + " as the ExecType");
            continue;
        }
        log.info("Picked " + candidate + " as the ExecType");
        return getSingleton(candidate);
    }
    throw new PigException("Unknown exec type: " + properties.getProperty("exectype"), 2040);
}
public Table getTable(String location, String hcatServerUri, String hcatServerPrincipal, Job job) throws IOException { Pair<String, String> loc_server = new Pair<String, String>(location, hcatServerUri); Table hcatTable = hcatTableCache.get(loc_server); if (hcatTable != null) { return hcatTable; } Pair<String, String> dbTablePair = PigHCatUtil.getDBTableNames(location); String dbName = dbTablePair.first; String tableName = dbTablePair.second; Table table = null; IMetaStoreClient client = null; try { client = getHiveMetaClient(hcatServerUri, hcatServerPrincipal, PigHCatUtil.class, job); table = HCatUtil.getTable(client, dbName, tableName); } catch (NoSuchObjectException nsoe) { throw new PigException("Table not found : " + nsoe.getMessage(), PIG_EXCEPTION_CODE); // prettier error messages to frontend } catch (Exception e) { throw new IOException(e); } finally { HCatUtil.closeHiveClientQuietly(client); } hcatTableCache.put(loc_server, table); return table; }
/**
 * Builds, compiles, and executes a logical plan that stores {@code alias} to
 * {@code filename} using storage function {@code func}.
 *
 * @param alias    relation to store; "@" means the most recently defined relation
 * @param filename output location attached to the store
 * @param func     store-function spec
 * @return statistics for the executed job(s)
 * @throws IOException (as PigException, code 1002) if the store cannot be planned or run
 */
private PigStats storeEx(String alias, String filename, String func) throws IOException {
    // "@" is shorthand for the last relation defined in the script.
    if ("@".equals(alias)) {
        alias = getLastRel();
    }
    // NOTE: order matters here — the query must be parsed and already-processed
    // stores skipped before the plan for this alias is built.
    currDAG.parseQuery();
    currDAG.skipStores(); // skip the stores that have already been processed
    currDAG.buildPlan( alias );

    try {
        QueryParserUtils.attachStorePlan(scope, currDAG.lp, filename, func, currDAG.getOperator( alias ), alias, pigContext);
        currDAG.compile();
        return executeCompiledLogicalPlan();
    } catch (PigException e) {
        // Re-wrap with a store-specific message; original exception kept as cause.
        int errCode = 1002;
        String msg = "Unable to store alias " + alias;
        throw new PigException(msg, errCode, PigException.INPUT, e);
    }
}
/**
 * Returns the Pig {@link ResourceSchema} for the HCat table at {@code location},
 * including partition columns, and caches the HCat schema in the UDFContext so
 * backend tasks can retrieve it without a metastore round trip.
 *
 * @param location load-statement location ({@code <db>.<table>} or {@code <table>})
 * @param job      current job; supplies configuration and metastore overrides
 * @throws IOException if the table cannot be resolved or its schema is
 *                     incompatible with Pig
 */
@Override
public ResourceSchema getSchema(String location, Job job) throws IOException {
    // Promote HCat tinyint/smallint to Pig int (Pig has no byte/short column types).
    HCatContext.INSTANCE.setConf(job.getConfiguration()).getConf().get()
        .setBoolean(HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION, true);

    Table table = phutil.getTable(location,
        hcatServerUri != null ? hcatServerUri : PigHCatUtil.getHCatServerUri(job),
        PigHCatUtil.getHCatServerPrincipal(job),
        // Pass job to initialize metastore conf overrides for embedded metastore case
        // (hive.metastore.uris = "").
        job);
    HCatSchema hcatTableSchema = HCatUtil.getTableSchemaWithPtnCols(table);
    try {
        // Reject schemas Pig cannot represent before any data is read.
        PigHCatUtil.validateHCatTableSchemaFollowsPigRules(hcatTableSchema);
    } catch (IOException e) {
        throw new PigException(
            "Table schema incompatible for reading through HCatLoader :" + e.getMessage()
                + ";[Table schema was " + hcatTableSchema.toString() + "]"
            , PigHCatUtil.PIG_EXCEPTION_CODE, e);
    }
    // Stash the schema under this loader's signature for use on the backend.
    storeInUDFContext(signature, HCatConstants.HCAT_TABLE_SCHEMA, hcatTableSchema);
    outputSchema = hcatTableSchema;
    return PigHCatUtil.getResourceSchema(hcatTableSchema);
}
/**
 * Returns the Pig {@link ResourceSchema} for the HCat table at {@code location},
 * including partition columns, and caches the HCat schema in the UDFContext so
 * backend tasks can retrieve it without a metastore round trip.
 *
 * @param location load-statement location ({@code <db>.<table>} or {@code <table>})
 * @param job      current job; supplies configuration and metastore overrides
 * @throws IOException if the table cannot be resolved or its schema is
 *                     incompatible with Pig
 */
@Override
public ResourceSchema getSchema(String location, Job job) throws IOException {
    // Promote HCat tinyint/smallint to Pig int (Pig has no byte/short column types).
    HCatContext.INSTANCE.setConf(job.getConfiguration()).getConf().get()
        .setBoolean(HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION, true);

    Table table = phutil.getTable(location,
        hcatServerUri != null ? hcatServerUri : PigHCatUtil.getHCatServerUri(job),
        PigHCatUtil.getHCatServerPrincipal(job),
        // Pass job to initialize metastore conf overrides for embedded metastore case
        // (hive.metastore.uris = "").
        job);
    HCatSchema hcatTableSchema = HCatUtil.getTableSchemaWithPtnCols(table);
    try {
        // Reject schemas Pig cannot represent before any data is read.
        PigHCatUtil.validateHCatTableSchemaFollowsPigRules(hcatTableSchema);
    } catch (IOException e) {
        throw new PigException(
            "Table schema incompatible for reading through HCatLoader :" + e.getMessage()
                + ";[Table schema was " + hcatTableSchema.toString() + "]"
            , PigHCatUtil.PIG_EXCEPTION_CODE, e);
    }
    // Stash the schema under this loader's signature for use on the backend.
    storeInUDFContext(signature, HCatConstants.HCAT_TABLE_SCHEMA, hcatTableSchema);
    outputSchema = hcatTableSchema;
    return PigHCatUtil.getResourceSchema(hcatTableSchema);
}
/**
 * Returns the Pig {@link ResourceSchema} for the HCat table at {@code location},
 * including partition columns, and caches the HCat schema in the UDFContext so
 * backend tasks can retrieve it without a metastore round trip.
 *
 * @param location load-statement location ({@code <db>.<table>} or {@code <table>})
 * @param job      current job; supplies configuration and metastore overrides
 * @throws IOException if the table cannot be resolved or its schema is
 *                     incompatible with Pig
 */
@Override
public ResourceSchema getSchema(String location, Job job) throws IOException {
    // Promote HCat tinyint/smallint to Pig int (Pig has no byte/short column types).
    HCatContext.INSTANCE.setConf(job.getConfiguration()).getConf().get()
        .setBoolean(HCatConstants.HCAT_DATA_TINY_SMALL_INT_PROMOTION, true);

    Table table = phutil.getTable(location,
        hcatServerUri != null ? hcatServerUri : PigHCatUtil.getHCatServerUri(job),
        PigHCatUtil.getHCatServerPrincipal(job),
        // Pass job to initialize metastore conf overrides for embedded metastore case
        // (hive.metastore.uris = "").
        job);
    HCatSchema hcatTableSchema = HCatUtil.getTableSchemaWithPtnCols(table);
    try {
        // Reject schemas Pig cannot represent before any data is read.
        PigHCatUtil.validateHCatTableSchemaFollowsPigRules(hcatTableSchema);
    } catch (IOException e) {
        throw new PigException(
            "Table schema incompatible for reading through HCatLoader :" + e.getMessage()
                + ";[Table schema was " + hcatTableSchema.toString() + "]"
            , PigHCatUtil.PIG_EXCEPTION_CODE, e);
    }
    // Stash the schema under this loader's signature for use on the backend.
    storeInUDFContext(signature, HCatConstants.HCAT_TABLE_SCHEMA, hcatTableSchema);
    outputSchema = hcatTableSchema;
    return PigHCatUtil.getResourceSchema(hcatTableSchema);
}
/**
 * Validates that a single HCat field is representable in Pig, recursing into
 * the element/value schemas of complex types.
 *
 * @param hcatField the HCat field to validate
 * @throws PigException if the field's type cannot be represented in Pig
 */
private static void validateHcatFieldFollowsPigRules(HCatFieldSchema hcatField) throws PigException {
    try {
        Type hType = hcatField.getType();
        switch (hType) {
        case BOOLEAN:
            // Boolean columns are only valid when this Pig version supports them.
            if (!pigHasBooleanSupport) {
                throw new PigException("Incompatible type found in HCat table schema: " + hcatField, PigHCatUtil.PIG_EXCEPTION_CODE);
            }
            break;
        case ARRAY:
            // Recurse into the array's element schema.
            validateHCatSchemaFollowsPigRules(hcatField.getArrayElementSchema());
            break;
        case STRUCT:
            // Recurse into the struct's member schema.
            validateHCatSchemaFollowsPigRules(hcatField.getStructSubSchema());
            break;
        case MAP:
            // key is only string
            // Non-string map keys are not rejected; they are converted to String
            // at read time, so just log the conversion here.
            if (hcatField.getMapKeyType() != Type.STRING) {
                LOG.info("Converting non-String key of map " + hcatField.getName() + " from "
                    + hcatField.getMapKeyType() + " to String.");
            }
            validateHCatSchemaFollowsPigRules(hcatField.getMapValueSchema());
            break;
        }
        // Other primitive types fall through: they are always representable.
    } catch (HCatException e) {
        throw new PigException("Incompatible type found in hcat table schema: " + hcatField, PigHCatUtil.PIG_EXCEPTION_CODE, e);
    }
}