public void validate() throws SparkplugException {
    // Ensure the declared column count matches the number of column names.
    if (columnNames.size() != numOfColumns) {
        throw new SparkplugException("Invalid number of columns in data set column names: "
                + columnNames.size() + " vs expected: " + numOfColumns);
    }
    // Ensure the declared column count matches the number of column types.
    if (types.size() != numOfColumns) {
        throw new SparkplugException("Invalid number of columns in data set types: "
                + types.size() + " vs expected: " + numOfColumns);
    }
    // Ensure every row has the right width and each value matches its column type.
    for (int i = 0; i < types.size(); i++) {
        for (Row row : rows) {
            List<Value<?>> values = row.getValues();
            if (values.size() != numOfColumns) {
                throw new SparkplugException("Invalid number of columns in data set row: "
                        + values.size() + " vs expected: " + numOfColumns);
            }
            types.get(i).checkType(values.get(i).getValue());
        }
    }
}
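// The sketch below is a minimal, standalone model of the three checks
// validate() performs (column-name count, type count, per-row width),
// using plain JDK types in place of Tahu's Value/Row model. It is an
// illustration of the validation logic, not the library's API.
import java.util.Arrays;
import java.util.List;

public class DataSetValidationSketch {
    static void validate(int numOfColumns, List<String> columnNames,
                         List<String> types, List<List<Object>> rows) {
        if (columnNames.size() != numOfColumns) {
            throw new IllegalStateException("Invalid number of column names: "
                    + columnNames.size() + " vs expected: " + numOfColumns);
        }
        if (types.size() != numOfColumns) {
            throw new IllegalStateException("Invalid number of types: "
                    + types.size() + " vs expected: " + numOfColumns);
        }
        for (List<Object> row : rows) {
            if (row.size() != numOfColumns) {
                throw new IllegalStateException("Invalid row width: "
                        + row.size() + " vs expected: " + numOfColumns);
            }
        }
    }

    public static void main(String[] args) {
        // Two columns declared, but the row carries three values -> throws.
        validate(2,
                Arrays.asList("temperature", "humidity"),
                Arrays.asList("Float", "Float"),
                Arrays.asList(Arrays.asList((Object) 21.5f, 40.0f, 99.9f)));
    }
}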
private Collection<DataSetDataType> convertDataSetDataTypes(List<Integer> protoTypes) {
    // Build up a List of column types from their integer wire codes.
    List<DataSetDataType> types = new ArrayList<>();
    for (int type : protoTypes) {
        types.add(DataSetDataType.fromInteger(type));
    }
    return types;
}
if (typesNode.isArray()) {
    for (JsonNode typeNode : typesNode) {
        // Map the JSON type name (e.g. "Int32") to its enum constant.
        typesList.add(DataSetDataType.valueOf(typeNode.textValue()));
    }
}
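// A runnable sketch of the JSON branch above, assuming Jackson on the
// classpath. The local enum mirrors a few DataSetDataType names so the
// example stands alone; the real code uses Tahu's enum.
import java.util.ArrayList;
import java.util.List;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class TypesNodeSketch {
    enum DataSetDataType { Int32, Float, Boolean, String }

    public static void main(String[] args) throws Exception {
        JsonNode typesNode = new ObjectMapper()
                .readTree("{\"types\":[\"Int32\",\"String\"]}")
                .get("types");

        List<DataSetDataType> typesList = new ArrayList<>();
        if (typesNode.isArray()) {
            for (JsonNode typeNode : typesNode) {
                // valueOf throws IllegalArgumentException on unknown names.
                typesList.add(DataSetDataType.valueOf(typeNode.textValue()));
            }
        }
        System.out.println(typesList); // [Int32, String]
    }
}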
// Encode each column type as its integer wire code for the protobuf payload.
for (DataSetDataType type : columnTypes) {
    dataSetBuilder.addTypes(type.toIntValue());
}
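// A standalone sketch of the integer round trip used by the encode loop
// above and by convertDataSetDataTypes(): toIntValue() writes each type
// to the wire, fromInteger() rebuilds it on decode. The int codes here
// are illustrative; the real values come from the Sparkplug B specification.
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class TypeCodeSketch {
    enum DataType {
        Int32(3), Float(9), Boolean(11), String(12);

        private final int code;
        DataType(int code) { this.code = code; }

        int toIntValue() { return code; }

        static DataType fromInteger(int code) {
            for (DataType t : values()) {
                if (t.code == code) return t;
            }
            throw new IllegalArgumentException("Unknown type code: " + code);
        }
    }

    public static void main(String[] args) {
        List<DataType> columnTypes = Arrays.asList(DataType.Int32, DataType.String);

        // Encode: what the addTypes(...) loop writes into the payload.
        List<Integer> wire = new ArrayList<>();
        for (DataType type : columnTypes) {
            wire.add(type.toIntValue());
        }

        // Decode: what convertDataSetDataTypes(...) rebuilds on receipt.
        List<DataType> decoded = new ArrayList<>();
        for (int code : wire) {
            decoded.add(DataType.fromInteger(code));
        }
        System.out.println(decoded); // [Int32, String]
    }
}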
private Value<?> convertDataSetValue(int protoType, SparkplugBProto.Payload.DataSet.DataSetValue protoValue)
        throws Exception {
    // Recover the column type from its integer wire code, then read the
    // matching field from the protobuf value.
    DataSetDataType type = DataSetDataType.fromInteger(protoType);
    switch (type) {
        case Boolean:
            return new Value<Boolean>(type, protoValue.getBooleanValue());
        // ... remaining cases read the corresponding protoValue field the same way.
        default:
            throw new Exception("Unhandled DataSetDataType: " + type);
    }
}
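// A standalone sketch of the dispatch pattern in convertDataSetValue():
// switch on the decoded column type and wrap the raw value in a typed
// wrapper. Simplified stand-ins throughout; the real code reads the
// protobuf DataSetValue's typed fields rather than casting an Object.
public class DataSetValueSketch {
    enum DataType { Boolean, Int32, String }

    // Simplified stand-in for Tahu's typed Value<?> wrapper.
    static final class Value<T> {
        final DataType type;
        final T value;
        Value(DataType type, T value) { this.type = type; this.value = value; }
        @Override public String toString() { return type + "=" + value; }
    }

    static Value<?> convertDataSetValue(DataType type, Object raw) {
        switch (type) {
            case Boolean:
                return new Value<>(type, (Boolean) raw);
            case Int32:
                return new Value<>(type, (Integer) raw);
            case String:
                return new Value<>(type, (String) raw);
            default:
                throw new IllegalArgumentException("Unhandled type: " + type);
        }
    }

    public static void main(String[] args) {
        System.out.println(convertDataSetValue(DataType.Boolean, true)); // Boolean=true
    }
}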