/**
 * Logs the given Pig Latin statement and registers it with the supplied server.
 *
 * @param server     the {@link PigServer} the query is registered with
 * @param query      the Pig Latin statement to register
 * @param lineNumber the 1-based script line number associated with the statement
 * @throws IOException if the server fails to register the query
 */
protected void logAndRegister(PigServer server, String query, int lineNumber) throws IOException {
    // Pig script line numbers are 1-based; a non-positive value indicates a caller bug.
    assert lineNumber > 0 : "lineNumber must be positive, got " + lineNumber;
    LOG.info("Registering pig query: " + query);
    server.registerQuery(query, lineNumber);
}
// Pipeline: load the whole Phoenix table, group by AGE, count rows per group, and
// store the result into mock storage; executeBatch() runs all registered statements at once.
pigServer.registerQuery(String.format( "A = load 'hbase://table/%s' using " + PhoenixHBaseLoader.class.getName() + "('%s');", TABLE, zkQuorum)); pigServer.registerQuery("B = GROUP A BY AGE;"); pigServer.registerQuery("C = FOREACH B GENERATE group,COUNT(A);"); pigServer.registerQuery("STORE C INTO 'out' using mock.Storage();"); pigServer.executeBatch();
pigServer.registerQuery(String.format( "A = load 'hbase://query/%s' using org.apache.phoenix.pig.PhoenixHBaseLoader('%s');", sqlQuery, zkQuorum));
// Register a LOAD backed by an arbitrary SQL query ('hbase://query/...'); the loader class
// name comes from the class reference so refactors keep the statement valid.
pigServer.registerQuery(String.format( "A = load 'hbase://query/%s' using " + PhoenixHBaseLoader.class.getName() + "('%s');", sqlQuery, zkQuorum));
@Test public void testDataForSQLQueryWithFunctions() throws Exception { //create the table final String TABLE = "TABLE9"; String ddl = "CREATE TABLE " + TABLE + " (ID INTEGER NOT NULL PRIMARY KEY, NAME VARCHAR) "; conn.createStatement().execute(ddl); final String dml = "UPSERT INTO " + TABLE + " VALUES(?,?)"; PreparedStatement stmt = conn.prepareStatement(dml); int rows = 20; for(int i = 0 ; i < rows; i++) { stmt.setInt(1, i); stmt.setString(2, "a"+i); stmt.execute(); } conn.commit(); //sql query final String sqlQuery = " SELECT UPPER(NAME) AS n FROM " + TABLE + " ORDER BY ID" ; pigServer.registerQuery(String.format( "A = load 'hbase://query/%s' using " + PhoenixHBaseLoader.class.getName() + "('%s');", sqlQuery, zkQuorum)); Iterator<Tuple> iterator = pigServer.openIterator("A"); int i = 0; while (iterator.hasNext()) { Tuple tuple = iterator.next(); String name = (String)tuple.get(0); assertEquals("A" + i, name); i++; } }
// Load the case-sensitive (full) table name and apply a Pig-side FILTER on AGE.
pigServer.registerQuery(String.format( "A = load 'hbase://table/%s' using " + PhoenixHBaseLoader.class.getName() + "('%s');", CASE_SENSITIVE_TABLE_FULL_NAME, zkQuorum)); pigServer.registerQuery("B = FILTER A BY AGE > 25;");
@Test public void testDateForSQLQuery() throws Exception { //create the table String ddl = "CREATE TABLE DATE_T (MYKEY VARCHAR,DATE_STP Date CONSTRAINT PK PRIMARY KEY (MYKEY)) "; conn.createStatement().execute(ddl); final String dml = "UPSERT INTO DATE_T VALUES('foo',TO_DATE('2004-03-10 10:00:00'))"; conn.createStatement().execute(dml); conn.commit(); //sql query final String sqlQuery = " SELECT mykey, hour(DATE_STP) FROM DATE_T "; pigServer.registerQuery(String.format( "A = load 'hbase://query/%s' using org.apache.phoenix.pig.PhoenixHBaseLoader('%s');", sqlQuery, zkQuorum)); final Iterator<Tuple> iterator = pigServer.openIterator("A"); while (iterator.hasNext()) { Tuple tuple = iterator.next(); assertEquals("foo", tuple.get(0)); assertEquals(10, tuple.get(1)); } }
@Test public void testTimestampForSQLQuery() throws Exception { //create the table String ddl = "CREATE TABLE TIMESTAMP_T (MYKEY VARCHAR,DATE_STP TIMESTAMP CONSTRAINT PK PRIMARY KEY (MYKEY)) "; conn.createStatement().execute(ddl); final String dml = "UPSERT INTO TIMESTAMP_T VALUES('foo',TO_TIMESTAMP('2006-04-12 00:00:00'))"; conn.createStatement().execute(dml); conn.commit(); //sql query final String sqlQuery = " SELECT mykey, year(DATE_STP) FROM TIMESTAMP_T "; pigServer.registerQuery(String.format( "A = load 'hbase://query/%s' using org.apache.phoenix.pig.PhoenixHBaseLoader('%s');", sqlQuery, zkQuorum)); final Iterator<Tuple> iterator = pigServer.openIterator("A"); while (iterator.hasNext()) { Tuple tuple = iterator.next(); assertEquals("foo", tuple.get(0)); assertEquals(2006, tuple.get(1)); } }
@Test public void testTimeForSQLQuery() throws Exception { //create the table String ddl = "CREATE TABLE TIME_T (MYKEY VARCHAR,DATE_STP TIME CONSTRAINT PK PRIMARY KEY (MYKEY)) "; conn.createStatement().execute(ddl); final String dml = "UPSERT INTO TIME_T VALUES('foo',TO_TIME('2008-05-16 00:30:00'))"; conn.createStatement().execute(dml); conn.commit(); //sql query final String sqlQuery = " SELECT mykey, minute(DATE_STP) FROM TIME_T "; pigServer.registerQuery(String.format( "A = load 'hbase://query/%s' using org.apache.phoenix.pig.PhoenixHBaseLoader('%s');", sqlQuery, zkQuorum)); final Iterator<Tuple> iterator = pigServer.openIterator("A"); while (iterator.hasNext()) { Tuple tuple = iterator.next(); assertEquals("foo", tuple.get(0)); assertEquals(30, tuple.get(1)); } }
@Test public void testDataFromIndexTable() throws Exception { //create the table String ddl = "CREATE TABLE " + TABLE_NAME + " (ID INTEGER NOT NULL, NAME VARCHAR NOT NULL, EMPLID INTEGER CONSTRAINT pk PRIMARY KEY (ID, NAME)) IMMUTABLE_ROWS=true"; conn.createStatement().execute(ddl); //create a index table String indexDdl = " CREATE INDEX " + INDEX_NAME + " ON " + TABLE_NAME + " (EMPLID) INCLUDE (NAME) "; conn.createStatement().execute(indexDdl); //upsert the data. final String dml = "UPSERT INTO " + TABLE_NAME + " VALUES(?,?,?)"; PreparedStatement stmt = conn.prepareStatement(dml); int rows = 20; for(int i = 0 ; i < rows; i++) { stmt.setInt(1, i); stmt.setString(2, "a"+i); stmt.setInt(3, i * 5); stmt.execute(); } conn.commit(); pigServer.registerQuery("A = load 'hbase://query/SELECT NAME , EMPLID FROM A WHERE EMPLID = 25 ' using " + PhoenixHBaseLoader.class.getName() + "('"+zkQuorum + "') ;"); Iterator<Tuple> iterator = pigServer.openIterator("A"); while (iterator.hasNext()) { Tuple tuple = iterator.next(); assertEquals("a5", tuple.get(0)); assertEquals(25, tuple.get(1)); } }
/** * Validates the schema returned when specific columns of a table are given as part of LOAD . * @throws Exception */ @Test public void testSchemaForTableWithSpecificColumns() throws Exception { //create the table final String TABLE = "TABLE2"; final String ddl = "CREATE TABLE " + TABLE + " (ID INTEGER NOT NULL PRIMARY KEY,NAME VARCHAR, AGE INTEGER) "; conn.createStatement().execute(ddl); final String selectColumns = "ID,NAME"; pigServer.registerQuery(String.format( "A = load 'hbase://table/%s/%s' using " + PhoenixHBaseLoader.class.getName() + "('%s');", TABLE, selectColumns, zkQuorum)); Schema schema = pigServer.dumpSchema("A"); List<FieldSchema> fields = schema.getFields(); assertEquals(2, fields.size()); assertTrue(fields.get(0).alias.equalsIgnoreCase("ID")); assertTrue(fields.get(0).type == DataType.INTEGER); assertTrue(fields.get(1).alias.equalsIgnoreCase("NAME")); assertTrue(fields.get(1).type == DataType.CHARARRAY); }
/** * Validates the schema returned when a SQL SELECT query is given as part of LOAD . * @throws Exception */ @Test public void testSchemaForQuery() throws Exception { //create the table. final String TABLE = "TABLE3"; String ddl = String.format("CREATE TABLE " + TABLE + " (A_STRING VARCHAR NOT NULL, A_DECIMAL DECIMAL NOT NULL, CF1.A_INTEGER INTEGER, CF2.A_DOUBLE DOUBLE" + " CONSTRAINT pk PRIMARY KEY (A_STRING, A_DECIMAL))\n", TABLE); conn.createStatement().execute(ddl); //sql query for LOAD final String sqlQuery = "SELECT A_STRING,CF1.A_INTEGER,CF2.A_DOUBLE FROM " + TABLE; pigServer.registerQuery(String.format( "A = load 'hbase://query/%s' using " + PhoenixHBaseLoader.class.getName() + "('%s');", sqlQuery, zkQuorum)); //assert the schema. Schema schema = pigServer.dumpSchema("A"); List<FieldSchema> fields = schema.getFields(); assertEquals(3, fields.size()); assertTrue(fields.get(0).alias.equalsIgnoreCase("a_string")); assertTrue(fields.get(0).type == DataType.CHARARRAY); assertTrue(fields.get(1).alias.equalsIgnoreCase("a_integer")); assertTrue(fields.get(1).type == DataType.INTEGER); assertTrue(fields.get(2).alias.equalsIgnoreCase("a_double")); assertTrue(fields.get(2).type == DataType.DOUBLE); }
// LOAD with an explicit Pig AS-schema; 'bigdecimal' maps the Phoenix DECIMAL column.
pigServer.registerQuery(String.format( "raw = load 'hbase://query/%s' using " + PhoenixHBaseLoader.class.getName() + "('%s') AS (a:chararray,b:bigdecimal,c:int,d:double);", sqlQuery, zkQuorum));
/**
 * Validates the schema returned for a table with Pig data types.
 * @throws Exception
 */
@Test
public void testSchemaForTable() throws Exception {
    final String tableName = "TABLE1";
    final String ddl = String.format("CREATE TABLE %s "
            + " (a_string varchar not null, a_binary varbinary not null, a_integer integer, cf1.a_float float"
            + " CONSTRAINT pk PRIMARY KEY (a_string, a_binary))\n", tableName);
    conn.createStatement().execute(ddl);
    conn.commit();

    pigServer.registerQuery(String.format(
            "A = load 'hbase://table/%s' using " + PhoenixHBaseLoader.class.getName() + "('%s');",
            tableName, zkQuorum));

    // Table-driven check: each Phoenix column must map to the expected Pig alias and type.
    final String[] expectedAliases = { "a_string", "a_binary", "a_integer", "a_float" };
    final byte[] expectedTypes = { DataType.CHARARRAY, DataType.BYTEARRAY, DataType.INTEGER, DataType.FLOAT };

    final Schema schema = pigServer.dumpSchema("A");
    final List<FieldSchema> fields = schema.getFields();
    assertEquals(expectedAliases.length, fields.size());
    for (int idx = 0; idx < expectedAliases.length; idx++) {
        assertTrue(fields.get(idx).alias.equalsIgnoreCase(expectedAliases[idx]));
        assertTrue(fields.get(idx).type == expectedTypes[idx]);
    }
}
// Register a LOAD backed by a SQL query rather than a table name ('hbase://query/...').
pigServer.registerQuery(String.format( "A = load 'hbase://query/%s' using " + PhoenixHBaseLoader.class.getName() + "('%s');", sqlQuery, zkQuorum));
// Read tuples from mock storage and write them into the Phoenix table via
// PhoenixHBaseStorage, committing in batches of 1000 rows.
pigServer.registerQuery("A = LOAD 'in' USING mock.Storage();"); pigServer.registerQuery("Store A into 'hbase://" + tableName + "' using " + PhoenixHBaseStorage.class.getName() + "('" + zkQuorum + "', '-batchSize 1000');");
// Read tuples from mock storage and write them into the Phoenix table via
// PhoenixHBaseStorage, committing in batches of 1000 rows.
pigServer.registerQuery("A = LOAD 'in' USING mock.Storage();"); pigServer.registerQuery("Store A into 'hbase://" + tableName + "' using " + PhoenixHBaseStorage.class.getName() + "('" + zkQuorum + "', '-batchSize 1000');");
// Store only the ID and DBL columns (listed after the table name); 'dbl' is declared
// as a Pig tuple in the mock load schema.
pigServer.registerQuery("A = LOAD 'in' USING mock.Storage() as (id:int, dbl:tuple());"); pigServer.registerQuery("Store A into 'hbase://" + tableName + "/ID,DBL" + "' using " + PhoenixHBaseStorage.class.getName() + "('" + zkQuorum + "', '-batchSize 1000');");
// Project id and name with a FOREACH before storing into the ID,NAME columns of the
// Phoenix table via PhoenixHBaseStorage.
pigServer.registerQuery("A = LOAD 'in' USING mock.Storage();"); pigServer.registerQuery("B = FOREACH A GENERATE id,name;"); pigServer.registerQuery("Store B into 'hbase://" + tableName + "/ID,NAME" + "' using " + PhoenixHBaseStorage.class.getName() + "('" + zkQuorum + "', '-batchSize 1000');");
// Read tuples from mock storage and write them into the Phoenix table via
// PhoenixHBaseStorage, committing in batches of 1000 rows.
pigServer.registerQuery("A = LOAD 'in' USING mock.Storage();"); pigServer.registerQuery("Store A into 'hbase://" + tableName + "' using " + PhoenixHBaseStorage.class.getName() + "('" + zkQuorum + "', '-batchSize 1000');");