@Override
public void eval() {
  org.apache.drill.exec.vector.complex.writer.BaseWriter.MapWriter mapWriter = outWriter.rootAsMap();
@Override
public void eval() {
  Object[] tokens;
  if (in.isSet == 1) {
    String inputString = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(in.start, in.end, in.buffer);
    // Convert the iterable to an array as Janino will not handle generics.
    tokens = com.google.common.collect.Iterables.toArray(splitter.split(inputString), String.class);
  } else {
    tokens = new Object[0];
  }
  org.apache.drill.exec.vector.complex.writer.BaseWriter.ListWriter list = writer.rootAsList();
  list.startList();
  org.apache.drill.exec.vector.complex.writer.VarCharWriter varCharWriter = list.varChar();
  for (Object token : tokens) {
    final byte[] strBytes = ((String) token).getBytes(com.google.common.base.Charsets.UTF_8);
    buffer = buffer.reallocIfNeeded(strBytes.length);
    buffer.setBytes(0, strBytes);
    varCharWriter.writeVarChar(0, strBytes.length, buffer);
  }
  list.endList();
}
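// The eval() above only compiles inside a Drill complex-output UDF that declares the
// `in`, `splitter`, `buffer`, and `writer` members it references. The class below is a
// minimal sketch of such a declaration; the function name, the hard-coded comma
// delimiter, and the field names are illustrative assumptions, not Drill's actual
// split implementation.
import io.netty.buffer.DrillBuf;
import javax.inject.Inject;
import org.apache.drill.exec.expr.DrillSimpleFunc;
import org.apache.drill.exec.expr.annotations.FunctionTemplate;
import org.apache.drill.exec.expr.annotations.Output;
import org.apache.drill.exec.expr.annotations.Param;
import org.apache.drill.exec.expr.annotations.Workspace;
import org.apache.drill.exec.expr.holders.NullableVarCharHolder;
import org.apache.drill.exec.vector.complex.writer.BaseWriter.ComplexWriter;

@FunctionTemplate(name = "example_split_to_list",
    scope = FunctionTemplate.FunctionScope.SIMPLE,
    nulls = FunctionTemplate.NullHandling.INTERNAL)
public class ExampleSplitToList implements DrillSimpleFunc {
  @Param NullableVarCharHolder in;                      // nullable UTF-8 input string
  @Workspace com.google.common.base.Splitter splitter;  // reused across rows
  @Inject DrillBuf buffer;                               // scratch buffer for VarChar bytes
  @Output ComplexWriter writer;                          // complex (list) output

  @Override
  public void setup() {
    // Comma delimiter hard-coded purely for illustration.
    splitter = com.google.common.base.Splitter.on(',');
  }

  @Override
  public void eval() {
    // ... body as shown in the excerpt above ...
  }
}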
BaseWriter.MapWriter fieldWriter = writer.rootAsMap();
while (fieldPath.getChild() != null && !fieldPath.getChild().isArray()) {
  fieldWriter = fieldWriter.map(fieldPath.getNameSegment().getPath());
  fieldPath = fieldPath.getChild();
}
      writeDataSwitch(writer.rootAsMap());
      break;
    case START_ARRAY:
      if (t == JsonToken.START_OBJECT) {
        inOuterList = true;
        writeDataSwitch(writer.rootAsMap());
      } else {
        String message = "The top level of your document must either be a single array of maps or a set "
      writeDataSwitch(writer.rootAsList());
throw new DrillRuntimeException("kvgen function only supports Simple maps as input");

BaseWriter.ListWriter listWriter = writer.rootAsList();
listWriter.startList();
BaseWriter.MapWriter mapWriter = listWriter.map();
      parser, String.format("Cannot read from the middle of a record. Current token was %s ", token));
  writer.rootAsMap().bit("count").writeBit(1);
  parser.skipChildren();
} catch (com.fasterxml.jackson.core.JsonParseException ex) {
@Override
public void setup(final OperatorContext context, final OutputMutator output) throws ExecutionSetupException {
  try {
    /*
     * Extract the list of field names for the parser to use if it is NOT a star query. For a star
     * query, pass null instead, because the parser is going to have to build all possibilities.
     */
    final Map<String, String> fieldMapping = !isStarQuery() ? makeParserFields() : null;
    writer = new VectorContainerWriter(output);
    parser = new HttpdParser(writer.rootAsMap(), context.getManagedBuffer(),
        HttpdLogFormatPlugin.this.getConfig().getLogFormat(),
        HttpdLogFormatPlugin.this.getConfig().getTimestampFormat(),
        fieldMapping);

    final Path path = fs.makeQualified(new Path(work.getPath()));
    FileSplit split = new FileSplit(path, work.getStart(), work.getLength(), new String[]{""});
    TextInputFormat inputFormat = new TextInputFormat();
    JobConf job = new JobConf(fs.getConf());
    job.setInt("io.file.buffer.size",
        fragmentContext.getConfig().getInt(ExecConstants.TEXT_LINE_READER_BUFFER_SIZE));
    job.setInputFormat(inputFormat.getClass());
    lineReader = (LineRecordReader) inputFormat.getRecordReader(split, job, Reporter.NULL);
    lineNumber = lineReader.createKey();
  } catch (NoSuchMethodException | MissingDissectorsException | InvalidDissectorException e) {
    throw handleAndGenerate("Failure creating HttpdParser", e);
  } catch (IOException e) {
    throw handleAndGenerate("Failure creating HttpdRecordReader", e);
  }
}
public void eval() {
  org.apache.drill.exec.vector.complex.writer.BaseWriter.MapWriter queryMapWriter = outWriter.rootAsMap();
/**
 * This record reader is handed its input as a batch of records (lines); each call to next()
 * reads and parses one such batch.
 *
 * @return the number of records in this batch
 */
@Override
public int next() {
  try {
    final Text line = lineReader.createValue();

    writer.allocate();
    writer.reset();
    int recordCount = 0;
    while (recordCount < VECTOR_MEMORY_ALLOCATION && lineReader.next(lineNumber, line)) {
      writer.setPosition(recordCount);
      parser.parse(line.toString());
      recordCount++;
    }
    writer.setValueCount(recordCount);
    return recordCount;
  } catch (DissectionFailure | InvalidDissectorException | MissingDissectorsException | IOException e) {
    throw handleAndGenerate("Failure while parsing log record.", e);
  }
}
@Override
public void eval() {
  if (in.isSet == 0) {
    // Return empty map
    org.apache.drill.exec.vector.complex.writer.BaseWriter.MapWriter mapWriter = writer.rootAsMap();
    mapWriter.start();
    mapWriter.end();
    return;
  }
  try {
    jsonReader.setSource(in.start, in.end, in.buffer);
    jsonReader.write(writer);
    buffer = jsonReader.getWorkBuf();
  } catch (Exception e) {
    throw new org.apache.drill.common.exceptions.DrillRuntimeException("Error while converting from JSON. ", e);
  }
}
public static void createMap(FieldReader reader, BaseWriter.ComplexWriter writer, String caller) {
  if (DataMode.REPEATED == reader.getType().getMode()) {
    throw new DrillRuntimeException("Do not invoke createMap() with REPEATED MINOR mode");
  }
  if (reader.getType().getMinorType() == TypeProtos.MinorType.MAP) {
    BaseWriter.MapWriter mapWriter = writer.rootAsMap();
    // Iterate over the fields in the map
    Iterator<String> fieldIterator = reader.iterator();
    while (fieldIterator.hasNext()) {
      String field = fieldIterator.next();
      FieldReader fieldReader = reader.reader(field);
      // Write the value to the map
      MapUtility.writeToMapFromReader(fieldReader, mapWriter, field, caller);
    }
  }
}
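// A hedged usage sketch for the helper above: how a complex-output UDF's eval() might
// delegate to it. The names `input` (a @Param FieldReader), `outWriter` (the @Output
// ComplexWriter), and the caller tag are assumptions for illustration, not taken from
// the excerpt.
@Override
public void eval() {
  // Copies every field of the input map onto outWriter's root map; non-map input
  // falls through and writes nothing, exactly as the helper above does.
  createMap(input, outWriter, "example_map_udf");
}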
@Override
public void eval() {
  String inputString = org.apache.drill.exec.expr.fn.impl.StringFunctionHelpers.toStringFromUTF8(in.start, in.end, in.buffer);
  // Convert the iterable to an array as Janino will not handle generics.
  Object[] tokens = com.google.common.collect.Iterables.toArray(splitter.split(inputString), String.class);
  org.apache.drill.exec.vector.complex.writer.BaseWriter.ListWriter list = writer.rootAsList();
  list.startList();
  for (Object token : tokens) {
    final byte[] strBytes = ((String) token).getBytes(com.google.common.base.Charsets.UTF_8);
    buffer = buffer.reallocIfNeeded(strBytes.length);
    buffer.setBytes(0, strBytes);
    list.varChar().writeVarChar(0, strBytes.length, buffer);
  }
  list.endList();
}
private boolean doAlloc(int recordCount) {
  for (ValueVector v : this.allocationVectors) {
    // This will iteratively allocate memory for nested columns underneath.
    RecordBatchSizer.ColumnSize colSize = flattenMemoryManager.getColumnSize(v.getField().getName());
    colSize.allocateVector(v, recordCount);
  }

  // Allocate vv for complexWriters.
  if (complexWriters == null) {
    return true;
  }

  for (ComplexWriter writer : complexWriters) {
    writer.allocate();
  }

  return true;
}
public void ensureAtLeastOneField(ComplexWriter writer) {
  if (!atLeastOneWrite) {
    // if we had no columns, create one empty one so we can return some data
    // for count purposes.
    SchemaPath sp = columns.get(0);
    PathSegment root = sp.getRootSegment();
    BaseWriter.MapWriter fieldWriter = writer.rootAsMap();
    while (root.getChild() != null && !root.getChild().isArray()) {
      fieldWriter = fieldWriter.map(root.getNameSegment().getPath());
      root = root.getChild();
    }
    fieldWriter.integer(root.getNameSegment().getPath());
  }
}
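// To make the loop above concrete, a hedged illustration with a hypothetical projected
// column `a`.`b`.`c` (no array segments): these are the writer calls it ends up making,
// so an empty but correctly typed vector exists even when no record was written.
BaseWriter.MapWriter w = writer.rootAsMap();
w = w.map("a");   // descend the "a" name segment
w = w.map("b");   // descend the "b" name segment
w.integer("c");   // materialize an (empty) INT field for the leaf segment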
public void setValueCount(int count) { writer.setValueCount(count); }
public void setPosition(int position) { writer.setPosition(position); }