Refine search
/**
 * Writes one Pig tuple as an HCatalog record.
 *
 * <p>Each tuple field is converted via {@code getJavaObj} according to the
 * corresponding field of {@code computedSchema} (positional match), then the
 * assembled {@link DefaultHCatRecord} is handed to the underlying writer.
 *
 * @param tuple the Pig tuple to write; must have at least as many fields as
 *              {@code computedSchema} — TODO confirm caller guarantees this
 * @throws IOException (as {@code BackendException}) if the write is interrupted
 */
@Override
public void putNext(Tuple tuple) throws IOException {
    List<Object> outgoing = new ArrayList<Object>(tuple.size());
    int i = 0;
    for (HCatFieldSchema fSchema : computedSchema.getFields()) {
        outgoing.add(getJavaObj(tuple.get(i++), fSchema));
    }
    try {
        writer.write(null, new DefaultHCatRecord(outgoing));
    } catch (InterruptedException e) {
        // Fix: restore the thread's interrupt status before converting the
        // checked InterruptedException into a backend failure, so callers up
        // the stack can still observe the interruption.
        Thread.currentThread().interrupt();
        throw new BackendException("Error while writing tuple: " + tuple, PigHCatUtil.PIG_EXCEPTION_CODE, e);
    }
}
/**
 * Pulls the key column(s) out of an index tuple.
 *
 * <p>Index tuples carry two trailing bookkeeping fields, so a 3-field tuple
 * holds exactly one key column (returned directly); larger tuples hold a
 * composite key built from the leading {@code size - 2} fields.
 *
 * @param idxTuple the index tuple to extract from
 * @return the single key object, or a new tuple wrapping the composite key
 * @throws ExecException if a field cannot be read
 */
private Object extractKeysFromIdxTuple(Tuple idxTuple) throws ExecException {
    int size = idxTuple.size();
    if (size == 3) {
        // Single-column key: return it without wrapping.
        return idxTuple.get(0);
    }
    int keyCols = size - 2;
    List<Object> keyFields = new ArrayList<Object>(keyCols);
    for (int col = 0; col < keyCols; col++) {
        keyFields.add(idxTuple.get(col));
    }
    // newTupleNoCopy takes ownership of the freshly built list.
    return mTupleFactory.newTupleNoCopy(keyFields);
}
/**
 * Rebuilds an input tuple into an output tuple two fields shorter, replacing
 * the leading bookkeeping fields with a single absolute offset.
 *
 * <p>Output field 0 = per-split base offset (derived from the split index in
 * input field 0) + the local offset in input field 2. Output fields
 * {@code 1..n-3} are copied from input fields {@code 3..n-1}.
 *
 * @param input the raw tuple (split index, ?, local offset, payload...)
 * @return the re-shaped tuple with an absolute offset in field 0
 */
@Override
public Tuple call(Tuple input) throws Exception {
    int inputSize = input.getAll().size();
    Tuple output = TupleFactory.getInstance().newTuple(inputSize - 2);
    // Shift the payload fields down by two positions.
    for (int pos = 1; pos < inputSize - 2; pos++) {
        output.set(pos, input.get(pos + 2));
    }
    long base = calculateOffset((Integer) input.get(0));
    output.set(0, base + (Long) input.get(2));
    return output;
}
@Override public Tuple exec(Tuple input) throws IOException { // Since Initial is guaranteed to be called // only in the map, it will be called with an // input of a bag with a single tuple - the // count should always be 1 if bag is non empty DataBag bag = (DataBag)input.get(0); return mTupleFactory.newTuple(bag.iterator().hasNext()? Long.valueOf(1L) : Long.valueOf(0L)); } }
/**
 * Parses the JSON string in tuple field 0 into a String-to-String map.
 *
 * @param input tuple whose field 0 is a JSON literal; a null field yields null
 * @return the parsed map, or null if field 0 is null
 * @throws IOException if the tuple is null/empty or a field cannot be read
 */
@Override
public Map<String, String> exec(Tuple input) throws IOException {
    try {
        // Fix: compute the arity null-safely first. The original built the
        // error message with input.size(), which threw NullPointerException
        // (instead of the intended IOException) whenever input was null.
        int arity = (input == null) ? 0 : input.size();
        if (arity < 1) {
            throw new IOException("Not enough arguments to " + this.getClass().getName()
                + ": got " + arity + ", expected at least 1");
        }
        if (input.get(0) == null) {
            return null;
        }
        String jsonLiteral = (String) input.get(0);
        return parseStringToMap(jsonLiteral);
    } catch (ExecException e) {
        LOG.warn("Error in " + getClass() + " with input " + input, e);
        throw new IOException(e);
    }
}
/**
 * Builds a synthetic example tuple that matches (or, when {@code invert} is
 * set, fails) the given predicate.
 *
 * <p>A copy of the constraint tuple is wrapped as an {@link ExampleTuple},
 * mutated by {@code GenerateMatchingTupleHelper} against the predicate's root
 * source, and flagged as synthetic.
 *
 * @param constraint the tuple providing the starting field values
 * @param predicate  the expression plan whose first source drives the mutation
 * @param invert     whether to generate a non-matching tuple instead
 * @return the synthetic example tuple
 */
ExampleTuple GenerateMatchingTuple(Tuple constraint, LogicalExpressionPlan predicate, boolean invert)
    throws ExecException, FrontendException {
    Tuple backing = TupleFactory.getInstance().newTuple(constraint.size());
    ExampleTuple matching = new ExampleTuple(backing);
    // Seed the example with the constraint's current field values.
    for (int field = 0; field < backing.size(); field++) {
        matching.set(field, constraint.get(field));
    }
    GenerateMatchingTupleHelper(matching, predicate.getSources().get(0), invert);
    matching.synthetic = true;
    return matching;
}
/** * Reserve N next sequences for a sequence name. N is the first field in the tuple. Sequence name is the second * field in the tuple zkquorum is the third field in the tuple */ @Override public Long exec(Tuple input) throws IOException { Preconditions.checkArgument(input != null && input.size() >= 2, INVALID_TUPLE_MESSAGE); Long numToReserve = (Long)(input.get(0)); Preconditions.checkArgument(numToReserve > 0, INVALID_NUMBER_MESSAGE); String sequenceName = (String)input.get(1); Preconditions.checkNotNull(sequenceName, EMPTY_SEQUENCE_NAME_MESSAGE); // It will create a connection when called for the first Tuple per task. // The connection gets cleaned up in finish() method if (connection == null) { initConnection(); } ResultSet rs = null; try { String sql = getNextNSequenceSelectStatement(Long.valueOf(numToReserve), sequenceName); rs = connection.createStatement().executeQuery(sql); Preconditions.checkArgument(rs.next()); Long startIndex = rs.getLong(1); rs.close(); connection.commit(); return startIndex; } catch (SQLException e) { throw new IOException("Caught exception while processing row." + e.getMessage(), e); } }
/**
 * Writes one Pig tuple as a {@code PhoenixRecord}.
 *
 * <p>Every tuple field is appended positionally to the record (the field
 * schemas, when present, are passed to the record for typing) and the record
 * is handed to the underlying writer.
 *
 * @param t the tuple to write
 * @throws IOException if a field cannot be read
 */
@Override
public void putNext(Tuple t) throws IOException {
    ResourceFieldSchema[] fieldSchemas = (schema == null) ? null : schema.getFields();
    PhoenixRecord record = new PhoenixRecord(fieldSchemas);
    for (int i = 0; i < t.size(); i++) {
        record.add(t.get(i));
    }
    try {
        writer.write(null, record);
    } catch (InterruptedException e) {
        // Fix: restore the interrupt status before rethrowing unchecked, as
        // the PhoenixRecordWritable variant of this storer already does.
        Thread.currentThread().interrupt();
        throw new RuntimeException(e);
    }
}
/**
 * Extracts document-metadata fields from a serialized DocumentWrapper.
 *
 * <p>Field 0 of the input holds the protobuf bytes; the output tuple is sized
 * by {@code fieldNumberMap} and populated by
 * {@code addDocumentMetatdataFields}.
 *
 * @param input tuple whose field 0 is a DataByteArray of DocumentWrapper bytes
 * @return the populated metadata tuple
 */
@Override
public Tuple exec(Tuple input) throws IOException {
    DataByteArray raw = (DataByteArray) input.get(0);
    DocumentMetadata metadata =
        DocumentWrapper.parseFrom(raw.get()).getDocumentMetadata();
    // One slot per mapped metadata field.
    Tuple output = TupleFactory.getInstance().newTuple(fieldNumberMap.size());
    return addDocumentMetatdataFields(metadata, output);
}
// Writes one Pig tuple to Phoenix: each non-null field is cast from its Pig
// type to the target column's Phoenix type before being added to the record.
@Override public void putNext(Tuple t) throws IOException {
    // Prefer declared Pig field schemas for type information; fall back to
    // runtime discovery (DataType.findType) when no schema was supplied.
    ResourceFieldSchema[] fieldSchemas = (schema == null) ? null : schema.getFields();
    PhoenixRecordWritable record = new PhoenixRecordWritable(this.columnInfo);
    try {
        for(int i=0; i<t.size(); i++) {
            Object value = t.get(i);
            // Nulls pass straight through — no cast is attempted.
            if(value == null) {
                record.add(null);
                continue;
            }
            // Column metadata is positionally aligned with tuple fields —
            // assumes columnInfo covers every tuple index; TODO confirm upstream.
            ColumnInfo cinfo = this.columnInfo.get(i);
            byte type = (fieldSchemas == null) ? DataType.findType(value) : fieldSchemas[i].getType();
            PDataType pDataType = PDataType.fromTypeId(cinfo.getSqlType());
            // Convert the Pig value into the Phoenix-native representation.
            Object v = TypeUtil.castPigTypeToPhoenix(value, type, pDataType);
            record.add(v);
        }
        this.writer.write(null, record);
    } catch (InterruptedException e) {
        // Restore interrupt status before rethrowing unchecked.
        Thread.currentThread().interrupt();
        throw new RuntimeException(e);
    } catch (SQLException e) {
        LOG.error("Error on tuple {} .",t);
        throw new IOException(e);
    }
}
/**
 * Parses a User-Agent string into the requested analyzer fields.
 *
 * <p>Field 0 of the input tuple holds the raw User-Agent string; the output
 * tuple carries one value per entry of {@code requestedFields}, in order.
 *
 * @param tuple the input tuple (User-Agent string in field 0)
 * @return a tuple of the requested parsed values
 */
@Override
public Tuple exec(Tuple tuple) throws IOException {
    initialize();
    UserAgent agent = analyzer.parse((String) tuple.get(0));
    Tuple result = TUPLE_FACTORY.newTuple();
    for (String field : requestedFields) {
        result.append(agent.getValue(field));
    }
    return result;
}
// NOTE(review): excerpt of a larger test method — the enclosing method and the
// construction of tuple `t` are not visible here.
assertEquals(DataType.LONG, DataType.findType(t.get(2)));
assertEquals(DataType.TUPLE, DataType.findType(t.get(4)));
Tuple doubleArrayTuple = (Tuple)t.get(4);
assertEquals(2,doubleArrayTuple.size());
// NOTE(review): the next two asserts re-check t.get(0)/t.get(1) rather than
// the just-extracted doubleArrayTuple fields — confirm this is intentional and
// not meant to be doubleArrayTuple.get(0)/doubleArrayTuple.get(1).
assertEquals(DataType.BIGDECIMAL, DataType.findType(t.get(0)));
assertEquals(DataType.BIGINTEGER, DataType.findType(t.get(1)));
/**
 * Extracts (id, title) from a serialized DocumentWrapper protobuf.
 *
 * <p>Field 1 of the input holds the raw bytes; the id is the document
 * metadata key and the title is the first basic-metadata title's text.
 *
 * @param tuple input tuple with DocumentWrapper bytes in field 1
 * @return a two-field tuple (id, title)
 */
@Override
public Tuple exec(Tuple tuple) throws IOException {
    byte[] serialized = ((DataByteArray) tuple.get(1)).get();
    DocumentProtos.DocumentWrapper wrapper =
        DocumentProtos.DocumentWrapper.parseFrom(serialized);
    String id = wrapper.getDocumentMetadata().getKey();
    String title =
        wrapper.getDocumentMetadata().getBasicMetadata().getTitle(0).getText();
    return TupleFactory.getInstance().newTuple(Arrays.asList(id, title));
}
}
@Override public Long exec(Tuple input) throws IOException { if (input == null || input.size() < 2 || input.get(0) == null || input.get(1) == null) { return null; } DateTime startDate = (DateTime) input.get(0); DateTime endDate = (DateTime) input.get(1); // Larger date first // Subtraction may overflow return (startDate.getMillis() - endDate.getMillis()) / 60000L; }
@Override public Long exec(Tuple input) throws IOException { if (input == null || input.size() < 2 || input.get(0) == null || input.get(1) == null) { return null; } DateTime startDate = (DateTime) input.get(0); DateTime endDate = (DateTime) input.get(1); // Larger date first // Subtraction may overflow return startDate.getMillis() - endDate.getMillis(); }
@Override public Long exec(Tuple input) throws IOException { if (input == null || input.size() < 2 || input.get(0) == null || input.get(1) == null) { return null; } DateTime startDate = (DateTime) input.get(0); DateTime endDate = (DateTime) input.get(1); // Larger date first return (startDate.getMillis() - endDate.getMillis()) / 3600000L; }