// Anonymous record-reader stub: exposes the test's captured field list as its schema.
// (The trailing "};" closes the enclosing anonymous class, whose opening is outside this view.)
@Override public RecordSchema getSchema() { return new SimpleRecordSchema(fields); } };
// Anonymous record-reader stub: exposes the test's captured field list as its schema.
// (The trailing "};" closes the enclosing anonymous class, whose opening is outside this view.)
@Override public RecordSchema getSchema() { return new SimpleRecordSchema(fields); } };
// Schema-access stub: always reports an empty schema, ignoring the supplied
// variables and read schema. Declared exceptions are never actually thrown here.
@Override public RecordSchema getSchema(Map<String, String> variables, RecordSchema readSchema) throws SchemaNotFoundException, IOException { return new SimpleRecordSchema(Collections.emptyList()); }
/**
 * Returns the next synthetic record built from the in-memory value iterator,
 * or null once the iterator is exhausted. After {@code failAfterN} records have
 * been served (when failAfterN is non-negative), throws a MalformedRecordException
 * so tests can exercise failure handling. Note coerceTypes/dropUnknown are ignored.
 */
@Override
public Record nextRecord(final boolean coerceTypes, final boolean dropUnknown) throws IOException, MalformedRecordException {
    if (failAfterN >= 0 && recordCount >= failAfterN) {
        throw new MalformedRecordException("Intentional Unit Test Exception because " + recordCount + " records have been read");
    }
    // Counter is bumped before the exhaustion check, matching the failure-injection semantics above.
    recordCount++;

    if (!itr.hasNext()) {
        return null;
    }

    final Object[] rowValues = itr.next();
    final Map<String, Object> row = new HashMap<>();
    int column = 0;
    for (final RecordField recordField : fields) {
        // Values are positional: column i maps onto the i-th declared field.
        row.put(recordField.getFieldName(), rowValues[column]);
        column++;
    }
    return new MapRecord(new SimpleRecordSchema(fields), row);
}
/**
 * Reads one comma-separated line from the underlying reader and turns it into a
 * Record, pairing columns positionally with the declared fields; returns null at
 * end of input. After {@code failAfterN} records (when failAfterN &gt; -1) a
 * MalformedRecordException is thrown so tests can exercise failure handling.
 * NOTE(review): assumes every line has at least fields.size() columns — TODO confirm.
 */
@Override
public Record nextRecord(final boolean coerceTypes, final boolean dropUnknown) throws IOException, MalformedRecordException {
    if (failAfterN > -1 && recordCount >= failAfterN) {
        throw new MalformedRecordException("Intentional Unit Test Exception because " + recordCount + " records have been read");
    }

    final String line = reader.readLine();
    if (line == null) {
        return null;
    }
    recordCount++;

    final String[] columns = line.split(",");
    final Map<String, Object> row = new HashMap<>();
    int index = 0;
    for (final RecordField recordField : fields) {
        row.put(recordField.getFieldName(), columns[index].trim());
        index++;
    }
    return new MapRecord(new SimpleRecordSchema(fields), row);
}
/**
 * Derives a RecordSchema from the ResultSet's metadata: one field per column,
 * named by the column label, typed via getDataType, and marked nullable unless
 * the driver reports columnNoNulls (so "nullability unknown" counts as nullable).
 *
 * @param rs the result set whose metadata describes the columns
 * @param readerSchema an optional schema consulted by getDataType for type hints
 * @return the derived schema
 * @throws SQLException if the metadata cannot be read
 */
private static RecordSchema createSchema(final ResultSet rs, final RecordSchema readerSchema) throws SQLException {
    final ResultSetMetaData metadata = rs.getMetaData();
    final int columnCount = metadata.getColumnCount();
    final List<RecordField> fields = new ArrayList<>(columnCount);

    // JDBC columns are 1-indexed.
    for (int column = 1; column <= columnCount; column++) {
        final int sqlType = metadata.getColumnType(column);
        final DataType dataType = getDataType(sqlType, rs, column, readerSchema);
        final String fieldName = metadata.getColumnLabel(column);
        final boolean nullable = metadata.isNullable(column) != ResultSetMetaData.columnNoNulls;
        fields.add(new RecordField(fieldName, dataType, nullable));
    }
    return new SimpleRecordSchema(fields);
}
/**
 * Collapses the selected fields into a single replacement value:
 * null when nothing was selected, the bare value when exactly one field was
 * selected, and otherwise a new Record whose schema and values are taken from
 * the selected fields.
 */
private Object getReplacementObject(final List<FieldValue> selectedFields) {
    if (selectedFields.isEmpty()) {
        return null;
    }
    if (selectedFields.size() == 1) {
        return selectedFields.get(0).getValue();
    }

    // Multiple selections: wrap them all in a fresh Record.
    final List<RecordField> fields = selectedFields.stream()
        .map(FieldValue::getField)
        .collect(Collectors.toList());
    final Record record = new MapRecord(new SimpleRecordSchema(fields), new HashMap<>());
    for (final FieldValue fieldValue : selectedFields) {
        record.setValue(fieldValue.getField().getFieldName(), fieldValue.getValue());
    }
    return record;
}
}
/**
 * Builds the record schema for a Grok-based reader: one field per named capture
 * in the original Grok expression, plus two synthetic nullable string columns
 * appended by the reader itself (stack trace and raw message).
 */
static RecordSchema createRecordSchema(final Grok grok) {
    final List<RecordField> fields = new ArrayList<>();

    // Fields derived from the named captures of the original Grok pattern.
    populateSchemaFieldNames(grok, grok.getOriginalGrokPattern(), fields);

    // Synthetic columns the reader populates in addition to the captures.
    fields.add(new RecordField(GrokRecordReader.STACK_TRACE_COLUMN_NAME, RecordFieldType.STRING.getDataType(), true));
    fields.add(new RecordField(GrokRecordReader.RAW_MESSAGE_NAME, RecordFieldType.STRING.getDataType(), true));

    return new SimpleRecordSchema(fields);
}
/**
 * Builds the schema directly from the known column names: every column becomes
 * a string-typed field. The supplied variables and read schema are ignored.
 */
@Override
public RecordSchema getSchema(Map<String, String> variables, RecordSchema readSchema) throws SchemaNotFoundException, IOException {
    return new SimpleRecordSchema(
        columnNames.stream()
            .map(columnName -> new RecordField(columnName, RecordFieldType.STRING.getDataType()))
            .collect(Collectors.toList()));
}
/**
 * Applies the selected replacement to the destination fields. When the single
 * destination is the record root (no parent record), the replacement either
 * becomes the new record (if it is a Record), is ignored (if null), or is
 * wrapped — together with all selected fields — into a fresh Record. Otherwise
 * each destination field is updated in place and the original record returned.
 */
private Record updateRecord(final List<FieldValue> destinationFields, final List<FieldValue> selectedFields, final Record record) {
    final boolean replacingRoot = destinationFields.size() == 1
        && !destinationFields.get(0).getParentRecord().isPresent();

    if (!replacingRoot) {
        // Update each destination field's value in place.
        for (final FieldValue destination : destinationFields) {
            destination.updateValue(getReplacementObject(selectedFields));
        }
        return record;
    }

    final Object replacement = getReplacementObject(selectedFields);
    if (replacement == null) {
        return record;
    }
    if (replacement instanceof Record) {
        return (Record) replacement;
    }

    // Scalar replacement at the root: expose the selected fields as a new Record.
    final List<RecordField> fields = selectedFields.stream()
        .map(FieldValue::getField)
        .collect(Collectors.toList());
    final Record wrapped = new MapRecord(new SimpleRecordSchema(fields), new HashMap<>());
    for (final FieldValue selected : selectedFields) {
        wrapped.setValue(selected.getField().getFieldName(), selected.getValue());
    }
    return wrapped;
}
/**
 * Infers a RecordSchema from a sample result map by inspecting each value's
 * runtime type: Integer/Long/Boolean/Double/Date/List map to the corresponding
 * field types, nested Maps recurse into a nested record schema, and anything
 * else (including null) falls back to string.
 */
protected RecordSchema convertSchema(Map<String, Object> result) {
    List<RecordField> fields = new ArrayList<>();
    for (Map.Entry<String, Object> entry : result.entrySet()) {
        final String name = entry.getKey();
        final Object value = entry.getValue();

        RecordField field;
        if (value instanceof Integer) {
            field = new RecordField(name, RecordFieldType.INT.getDataType());
        } else if (value instanceof Long) {
            field = new RecordField(name, RecordFieldType.LONG.getDataType());
        } else if (value instanceof Boolean) {
            field = new RecordField(name, RecordFieldType.BOOLEAN.getDataType());
        } else if (value instanceof Double) {
            field = new RecordField(name, RecordFieldType.DOUBLE.getDataType());
        } else if (value instanceof Date) {
            field = new RecordField(name, RecordFieldType.DATE.getDataType());
        } else if (value instanceof List) {
            field = new RecordField(name, RecordFieldType.ARRAY.getDataType());
        } else if (value instanceof Map) {
            // Nested document: recurse to build a nested record type.
            RecordDataType recordType = new RecordDataType(convertSchema((Map) value));
            field = new RecordField(name, recordType);
        } else {
            // Unknown (or null) values default to string.
            field = new RecordField(name, RecordFieldType.STRING.getDataType());
        }
        fields.add(field);
    }
    return new SimpleRecordSchema(fields);
}
@Override public RecordSchema getSchema(Map<String, String> variables, final InputStream contentStream, final RecordSchema readSchema) throws SchemaNotFoundException { if (this.context == null) { throw new SchemaNotFoundException("Schema Access Strategy intended only for validation purposes and cannot obtain schema"); } try { final CSVFormat csvFormat = CSVUtils.createCSVFormat(context).withFirstRecordAsHeader(); try (final Reader reader = new InputStreamReader(new BOMInputStream(contentStream)); final CSVParser csvParser = new CSVParser(reader, csvFormat)) { final List<RecordField> fields = new ArrayList<>(); for (final String columnName : csvParser.getHeaderMap().keySet()) { fields.add(new RecordField(columnName, RecordFieldType.STRING.getDataType(), true)); } return new SimpleRecordSchema(fields); } } catch (final Exception e) { throw new SchemaNotFoundException("Failed to read Header line from CSV", e); } }
/** Schema for a transaction: an int "id" and a double "amount". */
private RecordSchema getTransactionSchema() {
    final List<RecordField> fields = new ArrayList<>();
    fields.add(new RecordField("id", RecordFieldType.INT.getDataType()));
    fields.add(new RecordField("amount", RecordFieldType.DOUBLE.getDataType()));
    return new SimpleRecordSchema(fields);
}
/** Schema for an account: an int "id" and a double "balance". */
private RecordSchema getAccountSchema() {
    final List<RecordField> fields = new ArrayList<>();
    fields.add(new RecordField("id", RecordFieldType.INT.getDataType()));
    fields.add(new RecordField("balance", RecordFieldType.DOUBLE.getDataType()));
    return new SimpleRecordSchema(fields);
}
/**
 * Builds the generic syslog record schema: every syslog attribute becomes a
 * nullable string field, in declaration order, and the schema carries the
 * generic-syslog schema name as its identifier.
 */
static RecordSchema createRecordSchema() {
    final List<RecordField> fields = new ArrayList<>();

    // All generic-syslog columns are nullable strings; order matters to consumers.
    for (final SyslogAttributes attribute : new SyslogAttributes[] {
            SyslogAttributes.PRIORITY, SyslogAttributes.SEVERITY, SyslogAttributes.FACILITY,
            SyslogAttributes.VERSION, SyslogAttributes.TIMESTAMP, SyslogAttributes.HOSTNAME,
            SyslogAttributes.BODY}) {
        fields.add(new RecordField(attribute.key(), RecordFieldType.STRING.getDataType(), true));
    }

    final SchemaIdentifier identifier = new StandardSchemaIdentifier.Builder().name(GENERIC_SYSLOG_SCHEMA_NAME).build();
    return new SimpleRecordSchema(fields, identifier);
}
/**
 * Schema for an account that embeds its transactions: int "id", double
 * "balance", and a "transactions" array whose elements are transaction records.
 */
private RecordSchema getAccountWithTransactionSchema() {
    final List<RecordField> fields = new ArrayList<>();
    fields.add(new RecordField("id", RecordFieldType.INT.getDataType()));
    fields.add(new RecordField("balance", RecordFieldType.DOUBLE.getDataType()));
    fields.add(new RecordField("transactions",
            RecordFieldType.ARRAY.getArrayDataType(RecordFieldType.RECORD.getRecordDataType(getTransactionSchema()))));
    return new SimpleRecordSchema(fields);
}
/**
 * Builds the RFC 5424 syslog record schema. Field order is fixed: four string
 * header columns, the timestamp, five more string columns (including the 5424
 * app-name/procid/messageid), and finally the structured data as a map of
 * SD-IDs to maps of parameter name to value. The schema carries the RFC 5424
 * schema name as its identifier.
 */
static RecordSchema createRecordSchema() {
    final List<RecordField> fields = new ArrayList<>();

    // Leading header columns, all nullable strings.
    for (final String key : new String[] {
            SyslogAttributes.PRIORITY.key(), SyslogAttributes.SEVERITY.key(),
            SyslogAttributes.FACILITY.key(), SyslogAttributes.VERSION.key()}) {
        fields.add(new RecordField(key, RecordFieldType.STRING.getDataType(), true));
    }

    // Timestamp is the only non-string scalar column.
    fields.add(new RecordField(SyslogAttributes.TIMESTAMP.key(), RecordFieldType.TIMESTAMP.getDataType(), true));

    // Remaining string columns, including the RFC-5424-specific ones.
    for (final String key : new String[] {
            SyslogAttributes.HOSTNAME.key(), SyslogAttributes.BODY.key(),
            Syslog5424Attributes.APP_NAME.key(), Syslog5424Attributes.PROCID.key(),
            Syslog5424Attributes.MESSAGEID.key()}) {
        fields.add(new RecordField(key, RecordFieldType.STRING.getDataType(), true));
    }

    // Structured data: map of SD-ID -> (param name -> param value).
    fields.add(new RecordField(Syslog5424Attributes.STRUCTURED_BASE.key(),
            RecordFieldType.MAP.getMapDataType(RecordFieldType.MAP.getMapDataType(RecordFieldType.STRING.getDataType()))));

    final SchemaIdentifier identifier = new StandardSchemaIdentifier.Builder().name(RFC_5424_SCHEMA_NAME).build();
    return new SimpleRecordSchema(fields, identifier);
}
// NOTE(review): fragment of a larger method — "fields" and "fieldsWrite" are built outside this
// view; presumably "schema" feeds the reader and "schemaWrite" the writer. Verify against the caller.
final RecordSchema schema = new SimpleRecordSchema(fields); final RecordSchema schemaWrite = new SimpleRecordSchema(fieldsWrite);
/**
 * ForkRecord in EXTRACT mode with INCLUDE_PARENT_FIELDS=true: each forked
 * account record must also carry the parent record's fields, as asserted
 * against the exact CSV output of the custom writer.
 */
@Test
public void testForkExtractSimpleWithParentFields() throws IOException, MalformedRecordException, InitializationException {
    final TestRunner testRunner = TestRunners.newTestRunner(new ForkRecord());

    // Reader schema: default fields plus a nested "accounts" array of account records.
    final List<RecordField> readFields = getDefaultFields();
    readFields.add(new RecordField("accounts",
            RecordFieldType.ARRAY.getArrayDataType(RecordFieldType.RECORD.getRecordDataType(getAccountSchema()))));
    final RecordSchema readSchema = new SimpleRecordSchema(readFields);

    // Writer schema: the parent's default fields plus the forked account's balance.
    final List<RecordField> writeFields = getDefaultFields();
    writeFields.add(new RecordField("balance", RecordFieldType.DOUBLE.getDataType()));
    final RecordSchema writeSchema = new SimpleRecordSchema(writeFields);

    final JsonRecordReader readerService = new JsonRecordReader(readSchema);
    final MockRecordWriter writerService = new CustomRecordWriter("header", false, writeSchema);
    testRunner.addControllerService("reader", readerService);
    testRunner.enableControllerService(readerService);
    testRunner.addControllerService("writer", writerService);
    testRunner.enableControllerService(writerService);

    testRunner.setProperty(ForkRecord.RECORD_READER, "reader");
    testRunner.setProperty(ForkRecord.RECORD_WRITER, "writer");
    testRunner.setProperty(ForkRecord.MODE, ForkRecord.MODE_EXTRACT);
    testRunner.setProperty(ForkRecord.INCLUDE_PARENT_FIELDS, "true");
    testRunner.setProperty("my-path", "/accounts");

    testRunner.enqueue(new File("src/test/resources/TestForkRecord/single-element-nested-array.json").toPath());
    testRunner.run(1);

    testRunner.assertTransferCount(ForkRecord.REL_ORIGINAL, 1);
    testRunner.assertTransferCount(ForkRecord.REL_FORK, 1);
    final MockFlowFile forked = testRunner.getFlowFilesForRelationship(ForkRecord.REL_FORK).get(0);
    forked.assertAttributeEquals("record.count", "2");
    forked.assertContentEquals("header\n42,4750.89,John Doe,123 My Street,My City,MS,11111,USA\n43,48212.38,John Doe,123 My Street,My City,MS,11111,USA\n");
}
/**
 * ForkRecord in EXTRACT mode without parent fields: each forked account record
 * contains only the account's own fields, as asserted against the exact CSV
 * output of the custom writer.
 */
@Test
public void testForkExtractSimpleWithoutParentFields() throws IOException, MalformedRecordException, InitializationException {
    final TestRunner testRunner = TestRunners.newTestRunner(new ForkRecord());

    // Reader schema: default fields plus a nested "accounts" array of account records.
    final List<RecordField> readFields = getDefaultFields();
    readFields.add(new RecordField("accounts",
            RecordFieldType.ARRAY.getArrayDataType(RecordFieldType.RECORD.getRecordDataType(getAccountSchema()))));
    final RecordSchema readSchema = new SimpleRecordSchema(readFields);

    final JsonRecordReader readerService = new JsonRecordReader(readSchema);
    // Writer emits only the account schema (no parent fields).
    final MockRecordWriter writerService = new CustomRecordWriter("header", false, getAccountSchema());
    testRunner.addControllerService("reader", readerService);
    testRunner.enableControllerService(readerService);
    testRunner.addControllerService("writer", writerService);
    testRunner.enableControllerService(writerService);

    testRunner.setProperty(ForkRecord.RECORD_READER, "reader");
    testRunner.setProperty(ForkRecord.RECORD_WRITER, "writer");
    testRunner.setProperty(ForkRecord.MODE, ForkRecord.MODE_EXTRACT);
    testRunner.setProperty("my-path", "/accounts");

    testRunner.enqueue(new File("src/test/resources/TestForkRecord/single-element-nested-array.json").toPath());
    testRunner.run(1);

    testRunner.assertTransferCount(ForkRecord.REL_ORIGINAL, 1);
    testRunner.assertTransferCount(ForkRecord.REL_FORK, 1);
    final MockFlowFile forked = testRunner.getFlowFilesForRelationship(ForkRecord.REL_FORK).get(0);
    forked.assertAttributeEquals("record.count", "2");
    forked.assertContentEquals("header\n42,4750.89\n43,48212.38\n");
}