public WriteCSVResult(final CSVFormat csvFormat, final RecordSchema recordSchema, final SchemaAccessWriter schemaWriter, final OutputStream out, final String dateFormat, final String timeFormat, final String timestampFormat, final boolean includeHeaderLine, final String charSet) throws IOException { super(out); this.recordSchema = recordSchema; this.schemaWriter = schemaWriter; this.dateFormat = dateFormat; this.timeFormat = timeFormat; this.timestampFormat = timestampFormat; this.includeHeaderLine = includeHeaderLine; final CSVFormat formatWithHeader = csvFormat.withSkipHeaderRecord(true); final OutputStreamWriter streamWriter = new OutputStreamWriter(out, charSet); printer = new CSVPrinter(streamWriter, formatWithHeader); fieldValues = new Object[recordSchema.getFieldCount()]; }
/**
 * Converts a NiFi RecordSchema into an equivalent Avro record schema
 * named "nifiRecord" in the "org.apache.nifi" namespace.
 *
 * @param recordSchema the record schema to convert
 * @return the corresponding Avro Schema
 */
private static Schema buildAvroSchema(final RecordSchema recordSchema) {
    final int fieldCount = recordSchema.getFieldCount();
    final List<Field> avroFields = new ArrayList<>(fieldCount);

    // Translate each record field into its Avro counterpart, preserving order.
    for (int i = 0; i < fieldCount; i++) {
        avroFields.add(buildAvroField(recordSchema.getField(i)));
    }

    return Schema.createRecord("nifiRecord", null, "org.apache.nifi", false, avroFields);
}
/**
 * Writes a single record as a comma-separated line, optionally quoting values,
 * and simulates a failure after a configured number of records.
 *
 * Fix: all byte conversions now use an explicit "UTF-8" charset instead of the
 * platform default, so the written bytes are deterministic across platforms.
 * (UnsupportedEncodingException is a subclass of IOException, which this
 * method already declares.)
 *
 * @param record the record to write
 * @return a WriteResult reporting one record written
 * @throws IOException intentionally after {@code failAfterN} records, or on write failure
 */
@Override
public WriteResult write(Record record) throws IOException {
    // Simulated mid-stream failure; failAfterN == -1 disables it.
    if (++recordCount > failAfterN && failAfterN > -1) {
        throw new IOException("Unit Test intentionally throwing IOException after " + failAfterN + " records were written");
    }

    // Emit the header once, before the first record.
    if (header != null && !headerWritten) {
        out.write(header.getBytes("UTF-8"));
        out.write("\n".getBytes("UTF-8"));
        headerWritten = true;
    }

    final int numCols = record.getSchema().getFieldCount();
    int i = 0;
    for (final String fieldName : record.getSchema().getFieldNames()) {
        final String val = record.getAsString(fieldName);
        if (val != null) {
            if (quoteValues) {
                out.write("\"".getBytes("UTF-8"));
                out.write(val.getBytes("UTF-8"));
                out.write("\"".getBytes("UTF-8"));
            } else {
                out.write(val.getBytes("UTF-8"));
            }
        }

        // Separator after every column except the last.
        if (i++ < numCols - 1) {
            out.write(",".getBytes("UTF-8"));
        }
    }

    out.write("\n".getBytes("UTF-8"));
    return WriteResult.of(1, Collections.emptyMap());
}
/**
 * Builds a Record for the current row of the ResultSet, populating every schema
 * field; fields absent from the result set's columns are mapped to null.
 *
 * @param rs the result set positioned at the row to convert
 * @return a MapRecord holding the normalized column values
 * @throws SQLException if a column value cannot be read
 */
protected Record createRecord(final ResultSet rs) throws SQLException {
    final Map<String, Object> rowValues = new HashMap<>(schema.getFieldCount());

    for (final RecordField field : schema.getFields()) {
        final String fieldName = field.getFieldName();
        // Schema fields with no matching result-set column become explicit nulls.
        final Object value = rsColumnNames.contains(fieldName)
                ? normalizeValue(rs.getObject(fieldName))
                : null;
        rowValues.put(fieldName, value);
    }

    return new MapRecord(schema, rowValues);
}
final int numCols = record.getSchema().getFieldCount();
/**
 * Returns the value of every field in schema order, substituting each field's
 * default value where no explicit value has been set.
 *
 * @return an array with one entry per schema field
 */
@Override
public Object[] getValues() {
    final Object[] result = new Object[schema.getFieldCount()];
    int index = 0;
    for (final RecordField recordField : schema.getFields()) {
        final Object explicit = getExplicitValue(recordField);
        // Fall back to the field's default when nothing was set explicitly.
        result[index] = (explicit == null) ? recordField.getDefaultValue() : explicit;
        index++;
    }
    return result;
}
/**
 * Serializes a Record as a JSON object. Null-valued fields are emitted as JSON
 * nulls only when suppression is disabled (NEVER_SUPPRESS), or when only
 * missing fields are suppressed (SUPPRESS_MISSING) and the field name is
 * present in the record's raw field names.
 *
 * @param record    the record to serialize
 * @param generator the JSON generator to write to
 * @throws IOException if the generator fails to write
 */
private void writeRecord(final Record record, final RecordSchema writeSchema, final JsonGenerator generator) throws IOException {
    final RecordSchema schema = record.getSchema();
    generator.writeStartObject();

    final int fieldCount = schema.getFieldCount();
    for (int i = 0; i < fieldCount; i++) {
        final RecordField field = schema.getField(i);
        final String fieldName = field.getFieldName();
        final Object value = record.getValue(field);

        if (value == null) {
            // Evaluation order matches the original: NEVER_SUPPRESS short-circuits;
            // otherwise SUPPRESS_MISSING writes a null only for fields the raw record had.
            final boolean writeNull = nullSuppression.equals(NEVER_SUPPRESS.getValue())
                    || (nullSuppression.equals(SUPPRESS_MISSING.getValue()) && record.getRawFieldNames().contains(fieldName));
            if (writeNull) {
                generator.writeNullField(fieldName);
            }
            continue;
        }

        generator.writeFieldName(fieldName);
        // DataType is present because fieldName came from this same schema.
        final DataType dataType = schema.getDataType(fieldName).get();
        writeValue(generator, value, fieldName, dataType);
    }

    generator.writeEndObject();
}
final Map<String, Object> values = new HashMap<>(schema.getFieldCount());
public static Map<String, Object> convertAvroRecordToMap(final GenericRecord avroRecord, final RecordSchema recordSchema, final Charset charset) { final Map<String, Object> values = new HashMap<>(recordSchema.getFieldCount());
/** * Writes each Record as a SolrInputDocument. */ public static void writeRecord(final Record record, final SolrInputDocument inputDocument,final List<String> fieldsToIndex,String parentFieldName) throws IOException { RecordSchema schema = record.getSchema(); for (int i = 0; i < schema.getFieldCount(); i++) { final RecordField field = schema.getField(i); String fieldName; if(!StringUtils.isBlank(parentFieldName)) { // Prefixing parent field name fieldName = parentFieldName+"_"+field.getFieldName(); }else{ fieldName = field.getFieldName(); } final Object value = record.getValue(field); if (value == null) { continue; }else { final DataType dataType = schema.getDataType(field.getFieldName()).get(); writeValue(inputDocument, value, fieldName, dataType,fieldsToIndex); } } }
private Record convertJsonNodeToRecord(final JsonNode jsonNode, final RecordSchema schema, final String fieldNamePrefix, final boolean coerceTypes, final boolean dropUnknown) throws IOException, MalformedRecordException { final Map<String, Object> values = new HashMap<>(schema.getFieldCount() * 2);
private Record convertJsonNodeToRecord(final JsonNode jsonNode, final RecordSchema schema, final String fieldNamePrefix, final boolean coerceTypes, final boolean dropUnknown) throws IOException, MalformedRecordException { final Map<String, Object> values = new HashMap<>(schema.getFieldCount() * 2);
@Override public WriteResult write(final RecordSet rs) throws IOException { final int colCount = rs.getSchema().getFieldCount(); Assert.assertEquals(columnNames.size(), colCount); final List<String> colNames = new ArrayList<>(colCount); for (int i = 0; i < colCount; i++) { colNames.add(rs.getSchema().getField(i).getFieldName()); } Assert.assertEquals(columnNames, colNames); // Iterate over the rest of the records to ensure that we read the entire stream. If we don't // do this, we won't consume all of the data and as a result we will not close the stream properly Record record; while ((record = rs.next()) != null) { System.out.println(record); } return WriteResult.of(0, Collections.emptyMap()); }
final RecordSchema childSchema = recordDataType.getChildSchema(); final List<Field> childFields = new ArrayList<>(childSchema.getFieldCount()); for (final RecordField field : childSchema.getFields()) { childFields.add(buildAvroField(field));
) { final RecordSchema resultSchema = recordReader.getSchema(); assertEquals(3, resultSchema.getFieldCount());
/**
 * Converts a NiFi RecordSchema into an equivalent Avro record schema
 * named "nifiRecord" in the "org.apache.nifi" namespace.
 *
 * @param recordSchema the record schema to convert
 * @return the corresponding Avro Schema
 */
private static Schema buildAvroSchema(final RecordSchema recordSchema) {
    final int fieldCount = recordSchema.getFieldCount();
    final List<Field> avroFields = new ArrayList<>(fieldCount);

    // Translate each record field into its Avro counterpart, preserving order.
    for (int i = 0; i < fieldCount; i++) {
        avroFields.add(buildAvroField(recordSchema.getField(i)));
    }

    return Schema.createRecord("nifiRecord", null, "org.apache.nifi", false, avroFields);
}
/**
 * Builds a Record for the current row of the ResultSet, populating every schema
 * field; fields absent from the result set's columns are mapped to null.
 *
 * @param rs the result set positioned at the row to convert
 * @return a MapRecord holding the normalized column values
 * @throws SQLException if a column value cannot be read
 */
protected Record createRecord(final ResultSet rs) throws SQLException {
    final Map<String, Object> rowValues = new HashMap<>(schema.getFieldCount());

    for (final RecordField field : schema.getFields()) {
        final String fieldName = field.getFieldName();
        // Schema fields with no matching result-set column become explicit nulls.
        final Object value = rsColumnNames.contains(fieldName)
                ? normalizeValue(rs.getObject(fieldName))
                : null;
        rowValues.put(fieldName, value);
    }

    return new MapRecord(schema, rowValues);
}
/**
 * Returns the value of every field in schema order, substituting each field's
 * default value where no explicit value has been set.
 *
 * @return an array with one entry per schema field
 */
@Override
public Object[] getValues() {
    final Object[] result = new Object[schema.getFieldCount()];
    int index = 0;
    for (final RecordField recordField : schema.getFields()) {
        final Object explicit = getExplicitValue(recordField);
        // Fall back to the field's default when nothing was set explicitly.
        result[index] = (explicit == null) ? recordField.getDefaultValue() : explicit;
        index++;
    }
    return result;
}
public static Map<String, Object> convertAvroRecordToMap(final GenericRecord avroRecord, final RecordSchema recordSchema, final Charset charset) { final Map<String, Object> values = new HashMap<>(recordSchema.getFieldCount());
/** * Writes each Record as a SolrInputDocument. */ public static void writeRecord(final Record record, final SolrInputDocument inputDocument,final List<String> fieldsToIndex,String parentFieldName) throws IOException { RecordSchema schema = record.getSchema(); for (int i = 0; i < schema.getFieldCount(); i++) { final RecordField field = schema.getField(i); String fieldName; if(!StringUtils.isBlank(parentFieldName)) { // Prefixing parent field name fieldName = parentFieldName+"_"+field.getFieldName(); }else{ fieldName = field.getFieldName(); } final Object value = record.getValue(field); if (value == null) { continue; }else { final DataType dataType = schema.getDataType(field.getFieldName()).get(); writeValue(inputDocument, value, fieldName, dataType,fieldsToIndex); } } }