SparkplugBPayloadBuilder outboundPayloadBuilder = new SparkplugBPayloadBuilder(getNextSeqNum()) .setTimestamp(new Date()); outboundPayloadBuilder.addMetric(new MetricBuilder("Node Control/Scan Rate ms", MetricDataType.Int32, scanRateMs).createMetric()); outboundPayloadBuilder.createPayload())); outboundPayloadBuilder.addMetric(new MetricBuilder("Outputs/e", MetricDataType.Boolean, pibrella.getOutputPin(PibrellaOutput.E).isHigh()).createMetric()); } else if (metric.getName().equals("Outputs/f")) { pibrella.getOutputPin(PibrellaOutput.F).setState((Boolean) metric.getValue()); outboundPayloadBuilder.addMetric(new MetricBuilder("Outputs/f", MetricDataType.Boolean, pibrella.getOutputPin(PibrellaOutput.F).isHigh()).createMetric()); } else if (metric.getName().equals("Outputs/g")) { pibrella.getOutputPin(PibrellaOutput.G).setState((Boolean) metric.getValue()); outboundPayloadBuilder.addMetric(new MetricBuilder("Outputs/g", MetricDataType.Boolean, pibrella.getOutputPin(PibrellaOutput.G).isHigh()).createMetric()); } else if (metric.getName().equals("Outputs/h")) { pibrella.getOutputPin(PibrellaOutput.H).setState((Boolean) metric.getValue()); outboundPayloadBuilder.addMetric(new MetricBuilder("Outputs/h", MetricDataType.Boolean, pibrella.getOutputPin(PibrellaOutput.H).isHigh()).createMetric()); } else if (metric.getName().equals("Outputs/LEDs/green")) { outboundPayloadBuilder.addMetric(new MetricBuilder("Outputs/LEDs/green", MetricDataType.Boolean, pibrella.ledGreen().isOn()).createMetric()); } else if (metric.getName().equals("Outputs/LEDs/red")) {
private static void divideAndAddMessages(Collection<Message> messages, Message message, int maxBytes) throws SparkplugException, JsonProcessingException { Topic topic = message.getTopic(); SparkplugBPayload payload = message.getPayload(); List<Metric> metrics = payload.getMetrics(); final int metricCount = message.getPayload().getMetricCount(); final int size = toJsonString(message).getBytes().length; // Check if the message can be divided if (metricCount <= 1) { throw new SparkplugException("Cannot divide SparkplugBPayload with only " + metricCount + " metric(s)"); } int newMessageCount = size / maxBytes + ((size % maxBytes > 0) ? 1 : 0); int metricsPerMessageCount = metricCount / newMessageCount + ((metricCount % newMessageCount > 0) ? 1 : 0); int index = 0; while (index < metricCount) { int toIndex = metricCount < (index + metricsPerMessageCount) ? metricCount : index + metricsPerMessageCount; // build a new Message with the payload containing the next subset (count) of metrics Message newMessage = new MessageBuilder(topic, new SparkplugBPayloadBuilder().setTimestamp(payload.getTimestamp()).setUuid(payload.getUuid()) .setSeq(payload.getSeq()).addMetrics(new ArrayList<Metric>(metrics.subList(index, toIndex))) .createPayload()).build(); String jsonMessage = toJsonString(newMessage); if (jsonMessage.getBytes().length < maxBytes) { messages.add(newMessage); } else { divideAndAddMessages(messages, newMessage, maxBytes); } index += metricsPerMessageCount; } }
SparkplugBPayloadBuilder payloadBuilder = new SparkplugBPayloadBuilder(getNextSeqNum()) .setTimestamp(new Date()) .addMetric(new MetricBuilder("bdSeq", MetricDataType.Int64, bdSeq) .createMetric()) .addMetric(new MetricBuilder("Up Time ms", MetricDataType.Int64, System.currentTimeMillis() - upTimeStart) .createMetric()) .addMetric(new MetricBuilder("Node Control/Next Server", MetricDataType.Boolean, false) .createMetric()) .addMetric(new MetricBuilder("Node Control/Rebirth", MetricDataType.Boolean, false) .createMetric()) .addMetric(new MetricBuilder("Node Control/Reboot", MetricDataType.Boolean, false) .createMetric()) .addMetric(new MetricBuilder("Node Control/Scan Rate ms", MetricDataType.Int32, scanRateMs) .createMetric()) .addMetric(new MetricBuilder("Properties/Board Type", MetricDataType.String, SystemInfo.getBoardType().toString())
// Build the DEATH (LWT) payload: a timestamped payload whose only metric is the
// bdSeq number, then encode it to its binary wire form for MQTT registration.
SparkplugBPayloadBuilder deathPayload = new SparkplugBPayloadBuilder().setTimestamp(new Date());
// addBdSeqNum attaches the current bdSeq metric and advances the counter.
deathPayload = addBdSeqNum(deathPayload);
byte [] deathBytes = new SparkplugBPayloadEncoder().getBytes(deathPayload.createPayload());
// Build a payload carrying only the bdSeq metric (used when (re)establishing the
// session so the host can correlate this session with the registered DEATH),
// then encode it to bytes for publishing.
SparkplugBPayload payload = new SparkplugBPayloadBuilder(getNextSeqNum()) .setTimestamp(new Date())
        .addMetric(new MetricBuilder("bdSeq", MetricDataType.Int64, bdSeq) .createMetric()) .createPayload();
byte[] bytes = new SparkplugBPayloadEncoder().getBytes(payload);
// Build the DEATH (LWT) payload: timestamp plus the bdSeq metric, encoded to
// bytes so it can be registered as the MQTT last-will message.
SparkplugBPayloadBuilder deathPayload = new SparkplugBPayloadBuilder().setTimestamp(new Date());
// addBdSeqNum attaches the current bdSeq metric and advances the counter.
deathPayload = addBdSeqNum(deathPayload);
byte [] deathBytes = new SparkplugBPayloadEncoder().getBytes(deathPayload.createPayload());
// Build the DEATH (LWT) payload: timestamp plus the bdSeq metric, encoded to
// bytes so it can be registered as the MQTT last-will message.
SparkplugBPayloadBuilder deathPayload = new SparkplugBPayloadBuilder().setTimestamp(new Date());
// addBdSeqNum attaches the current bdSeq metric and advances the counter.
deathPayload = addBdSeqNum(deathPayload);
byte[] deathBytes = new SparkplugBPayloadEncoder().getBytes(deathPayload.createPayload());
// Periodic data payload: reports node uptime (ms since upTimeStart) as a single
// Int64 metric, with the next sequence number and a fresh timestamp.
SparkplugBPayload payload = new SparkplugBPayloadBuilder(getNextSeqNum()) .setTimestamp(new Date())
        .addMetric(new MetricBuilder("Up Time ms", MetricDataType.Int64,
                System.currentTimeMillis() - upTimeStart) .createMetric())
        .createPayload();
/** * * @param payload * @return * @throws IOException */ public static SparkplugBPayload compress(SparkplugBPayload payload, CompressionAlgorithm algorithm) throws IOException, SparkplugException { logger.trace("Compressing payload"); SparkplugBPayloadEncoder encoder = new SparkplugBPayloadEncoder(); // Encode bytes byte[] encoded = encoder.getBytes(payload); byte[] compressed = null; Metric algorithmMetric = new MetricBuilder(METRIC_ALGORITHM, MetricDataType.String, algorithm.toString()) .createMetric(); // Switch over compression algorithm switch (algorithm) { case GZIP: compressed = GZipUtil.compress(encoded); break; case DEFLATE: compressed = deflateBytes(encoded); break; default: throw new SparkplugException("Unknown or unsupported algorithm " + algorithm); } // Wrap and return the payload return new SparkplugBPayloadBuilder(payload.getSeq()).setBody(compressed).setUuid(UUID_COMPRESSED) .addMetric(algorithmMetric).createPayload(); }
new SparkplugBPayloadEncoder().getBytes(new SparkplugBPayloadBuilder() .addMetric(new MetricBuilder("Node Control/Rebirth", MetricDataType.Boolean, true) .createMetric()) .createPayload()), 0, false); return;
// Button listener callback: publishes a Sparkplug DDATA message on every
// press/release of the physical button.
public void onStateChange(ButtonStateChangeEvent event) {
    try {
        // Serialize against other publishers so sequence numbers stay ordered.
        synchronized (lock) {
            SparkplugBPayloadBuilder outboundPayloadBuilder =
                    new SparkplugBPayloadBuilder(getNextSeqNum()) .setTimestamp(new Date());
            if (event.getButton().getState() == ButtonState.PRESSED) {
                outboundPayloadBuilder.addMetric(new MetricBuilder("button",
                        MetricDataType.Boolean, true).createMetric());
                // Count presses, wrapping back to zero once the setpoint is exceeded.
                buttonCounter++;
                if (buttonCounter > buttonCounterSetpoint) {
                    buttonCounter = 0;
                }
                outboundPayloadBuilder.addMetric(new MetricBuilder("button count",
                        MetricDataType.Int32, buttonCounter).createMetric());
            } else {
                outboundPayloadBuilder.addMetric(new MetricBuilder("button",
                        MetricDataType.Boolean, false).createMetric());
            }
            byte[] bytes = new SparkplugBPayloadEncoder().getBytes(outboundPayloadBuilder.createPayload());
            // QoS 0, not retained — live DDATA per Sparkplug convention.
            client.publish(NAMESPACE + "/" + groupId + "/DDATA/" + edgeNode + "/" + deviceId, bytes, 0, false);
        }
    } catch (Exception e) {
        // NOTE(review): failures are only printed; publish errors are silently
        // dropped, matching the surrounding example code's style.
        e.printStackTrace();
    }
} });  // closes the anonymous listener and the registration call begun outside this view
public SparkplugBPayload buildFromByteArray(byte[] bytes) throws Exception { SparkplugBProto.Payload protoPayload = SparkplugBProto.Payload.parseFrom(bytes); SparkplugBPayloadBuilder builder = new SparkplugBPayloadBuilder(protoPayload.getSeq()); // Set the timestamp if (protoPayload.hasTimestamp()) { builder.setTimestamp(new Date(protoPayload.getTimestamp())); } // Set the sequence number if (protoPayload.hasSeq()) { builder.setSeq(protoPayload.getSeq()); } // Set the Metrics for (SparkplugBProto.Payload.Metric protoMetric : protoPayload.getMetricsList()) { builder.addMetric(convertMetric(protoMetric)); } // Set the body if (protoPayload.hasBody()) { builder.setBody(protoPayload.getBody().toByteArray()); } // Set the body if (protoPayload.hasUuid()) { builder.setUuid(protoPayload.getUuid()); } return builder.createPayload(); }
// Pi4J listener: mirrors digital input "Inputs/c" into a Sparkplug DDATA
// message each time the pin's state changes.
public void handleGpioPinDigitalStateChangeEvent(GpioPinDigitalStateChangeEvent event) {
    try {
        // Serialize with other publishers sharing this client and seq counter.
        synchronized (lock) {
            SparkplugBPayloadBuilder outboundPayloadBuilder =
                    new SparkplugBPayloadBuilder(getNextSeqNum()) .setTimestamp(new Date());
            if (event.getState() == PinState.HIGH) {
                outboundPayloadBuilder.addMetric(new MetricBuilder("Inputs/c",
                        MetricDataType.Boolean, true).createMetric());
            } else {
                outboundPayloadBuilder.addMetric(new MetricBuilder("Inputs/c",
                        MetricDataType.Boolean, false).createMetric());
            }
            byte[] bytes = new SparkplugBPayloadEncoder().getBytes(outboundPayloadBuilder.createPayload());
            // QoS 0, not retained — live DDATA per Sparkplug convention.
            client.publish(NAMESPACE + "/" + groupId + "/DDATA/" + edgeNode + "/" + deviceId, bytes, 0, false);
        }
    } catch (Exception e) {
        // NOTE(review): failures are only printed, not retried or reported.
        e.printStackTrace();
    }
} });  // closes the anonymous listener and the registration call begun outside this view
// Pi4J listener: mirrors digital input "Inputs/b" into a Sparkplug DDATA
// message each time the pin's state changes.
public void handleGpioPinDigitalStateChangeEvent(GpioPinDigitalStateChangeEvent event) {
    try {
        // Serialize with other publishers sharing this client and seq counter.
        synchronized (lock) {
            SparkplugBPayloadBuilder outboundPayloadBuilder =
                    new SparkplugBPayloadBuilder(getNextSeqNum()) .setTimestamp(new Date());
            if (event.getState() == PinState.HIGH) {
                outboundPayloadBuilder.addMetric(new MetricBuilder("Inputs/b",
                        MetricDataType.Boolean, true).createMetric());
            } else {
                outboundPayloadBuilder.addMetric(new MetricBuilder("Inputs/b",
                        MetricDataType.Boolean, false).createMetric());
            }
            byte[] bytes = new SparkplugBPayloadEncoder().getBytes(outboundPayloadBuilder.createPayload());
            // QoS 0, not retained — live DDATA per Sparkplug convention.
            client.publish(NAMESPACE + "/" + groupId + "/DDATA/" + edgeNode + "/" + deviceId, bytes, 0, false);
        }
    } catch (Exception e) {
        // NOTE(review): failures are only printed, not retried or reported.
        e.printStackTrace();
    }
} });  // closes the anonymous listener and the registration call begun outside this view
// Pi4J listener: mirrors digital input "Inputs/d" into a Sparkplug DDATA
// message each time the pin's state changes.
public void handleGpioPinDigitalStateChangeEvent(GpioPinDigitalStateChangeEvent event) {
    try {
        // Serialize with other publishers sharing this client and seq counter.
        synchronized (lock) {
            SparkplugBPayloadBuilder outboundPayloadBuilder =
                    new SparkplugBPayloadBuilder(getNextSeqNum()) .setTimestamp(new Date());
            if (event.getState() == PinState.HIGH) {
                outboundPayloadBuilder.addMetric(new MetricBuilder("Inputs/d",
                        MetricDataType.Boolean, true).createMetric());
            } else {
                outboundPayloadBuilder.addMetric(new MetricBuilder("Inputs/d",
                        MetricDataType.Boolean, false).createMetric());
            }
            byte[] bytes = new SparkplugBPayloadEncoder().getBytes(outboundPayloadBuilder.createPayload());
            // QoS 0, not retained — live DDATA per Sparkplug convention.
            client.publish(NAMESPACE + "/" + groupId + "/DDATA/" + edgeNode + "/" + deviceId, bytes, 0, false);
        }
    } catch (Exception e) {
        // NOTE(review): failures are only printed, not retried or reported.
        e.printStackTrace();
    }
} });  // closes the anonymous listener and the registration call begun outside this view
// Pi4J listener: mirrors digital input "Inputs/a" into a Sparkplug DDATA
// message each time the pin's state changes.
public void handleGpioPinDigitalStateChangeEvent(GpioPinDigitalStateChangeEvent event) {
    try {
        // Serialize with other publishers sharing this client and seq counter.
        synchronized (lock) {
            SparkplugBPayloadBuilder outboundPayloadBuilder =
                    new SparkplugBPayloadBuilder(getNextSeqNum()) .setTimestamp(new Date());
            if (event.getState() == PinState.HIGH) {
                outboundPayloadBuilder.addMetric(new MetricBuilder("Inputs/a",
                        MetricDataType.Boolean, true).createMetric());
            } else {
                outboundPayloadBuilder.addMetric(new MetricBuilder("Inputs/a",
                        MetricDataType.Boolean, false).createMetric());
            }
            byte[] bytes = new SparkplugBPayloadEncoder().getBytes(outboundPayloadBuilder.createPayload());
            // QoS 0, not retained — live DDATA per Sparkplug convention.
            client.publish(NAMESPACE + "/" + groupId + "/DDATA/" + edgeNode + "/" + deviceId, bytes, 0, false);
        }
    } catch (Exception e) {
        // NOTE(review): failures are only printed, not retried or reported.
        e.printStackTrace();
    }
} });  // closes the anonymous listener and the registration call begun outside this view
/** * * @param payload * @return * @throws IOException */ public static SparkplugBPayload compress(SparkplugBPayload payload) throws IOException { logger.trace("Compressing payload"); SparkplugBPayloadEncoder encoder = new SparkplugBPayloadEncoder(); // Encode bytes byte[] encoded = encoder.getBytes(payload); // Default to DEFLATE byte[] compressedBytes = deflateBytes(encoded); // Create new payload, add the bytes as the body, and return. return new SparkplugBPayloadBuilder(payload.getSeq()).setBody(compressedBytes).setUuid(UUID_COMPRESSED) .createPayload(); }
/**
 * Attaches the current bdSeq number as an Int64 metric to the given builder
 * (creating a fresh builder when {@code payload} is null), then advances the
 * counter, wrapping it from 256 back to 0 so published values stay in 0–255.
 *
 * @param payload builder to augment, or null to start a new one
 * @return the builder carrying the bdSeq metric
 * @throws Exception if metric construction fails
 */
private SparkplugBPayloadBuilder addBdSeqNum(SparkplugBPayloadBuilder payload) throws Exception {
    SparkplugBPayloadBuilder target = (payload == null) ? new SparkplugBPayloadBuilder() : payload;
    // Wrap before publishing so the metric value never reaches 256.
    if (bdSeq == 256) {
        bdSeq = 0;
    }
    target.addMetric(new MetricBuilder("bdSeq", Int64, (long)bdSeq).createMetric());
    bdSeq++;
    return target;
}
/**
 * Adds the current bdSeq value to the builder as an Int64 metric and then
 * increments the counter. A null builder argument yields a fresh builder;
 * the counter wraps from 256 to 0 before use, keeping values in 0–255.
 *
 * @param payload builder to augment, or null for a new one
 * @return the builder with the bdSeq metric attached
 * @throws Exception if metric construction fails
 */
private SparkplugBPayloadBuilder addBdSeqNum(SparkplugBPayloadBuilder payload) throws Exception {
    if (payload == null) {
        // Caller supplied nothing — start from an empty builder.
        payload = new SparkplugBPayloadBuilder();
    }
    // Reset the counter before publishing so 256 is never emitted.
    if (bdSeq == 256) {
        bdSeq = 0;
    }
    Metric bdSeqMetric = new MetricBuilder("bdSeq", Int64, (long)bdSeq).createMetric();
    payload.addMetric(bdSeqMetric);
    bdSeq++;
    return payload;
}
/**
 * Appends the bdSeq metric (Int64) for the current counter value to the
 * supplied builder, creating one when null, then bumps the counter. The
 * counter wraps from 256 back to 0 before the metric is built, so the
 * published range is always 0–255.
 *
 * @param payload builder to augment, or null to allocate a new one
 * @return the builder carrying the bdSeq metric
 * @throws Exception if metric construction fails
 */
private SparkplugBPayloadBuilder addBdSeqNum(SparkplugBPayloadBuilder payload) throws Exception {
    SparkplugBPayloadBuilder builder = (payload != null) ? payload : new SparkplugBPayloadBuilder();
    // Wrap the counter before it is published.
    if (bdSeq == 256) {
        bdSeq = 0;
    }
    builder.addMetric(new MetricBuilder("bdSeq", Int64, (long) bdSeq).createMetric());
    bdSeq++;
    return builder;
}