From 38951d439c2047b02e76634f5b2c58a52b1a5a9f Mon Sep 17 00:00:00 2001
From: Gavin Miller
Date: Mon, 24 Nov 2025 09:35:45 -0700
Subject: [PATCH 1/2] Add properties handling
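
Add PropertySet and PropertyValue dataclasses wrapping the Sparkplug B
protobuf messages of the same names, and give Metric an optional
properties field that survives the to_pb/from_pb round trip.

A minimal usage sketch (the metric name and property key below are
illustrative; until the next patch exports these classes from the
package root, they are imported from pysparkplug._properties):

    from pysparkplug import DataType, Metric
    from pysparkplug._properties import PropertySet, PropertyValue

    metric = Metric(
        timestamp=1234567890,
        name="temperature",
        datatype=DataType.FLOAT,
        value=21.5,
        properties=PropertySet(
            keys=("engUnit",),
            values=(PropertyValue(type=DataType.STRING, value="degC"),),
        ),
    )

    # Round-trip through protobuf; properties survive the conversion.
    decoded = Metric.from_pb(metric.to_pb(include_dtype=True))
    assert decoded.properties is not None
    assert decoded.properties.values[0].value == "degC"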
---
 src/pysparkplug/_metric.py         |  7 +++
 src/pysparkplug/_properties.py     | 80 ++++++++++++++++++++++++++++++
 test/unit_tests/test_metric.py     | 26 +++++++++-
 test/unit_tests/test_properties.py | 26 ++++++++++
 4 files changed, 138 insertions(+), 1 deletion(-)
 create mode 100644 src/pysparkplug/_properties.py
 create mode 100644 test/unit_tests/test_properties.py

diff --git a/src/pysparkplug/_metric.py b/src/pysparkplug/_metric.py
index 8020735..52767d4 100644
--- a/src/pysparkplug/_metric.py
+++ b/src/pysparkplug/_metric.py
@@ -5,6 +5,7 @@
 
 from pysparkplug._datatype import DataType
 from pysparkplug._metadata import Metadata
+from pysparkplug._properties import PropertySet
 from pysparkplug._protobuf import Metric as PB_Metric
 from pysparkplug._types import MetricValue, Self
 
@@ -34,12 +35,15 @@ class Metric:
             if this is null - explicitly say so rather than using -1, false, etc
         metadata:
             optional metadata for this metric
+        properties:
+            optional properties for this metric
     """
 
     timestamp: Optional[int]
     name: Optional[str]
     datatype: DataType
     metadata: Optional[Metadata] = None
+    properties: Optional[PropertySet] = None
     value: Optional[MetricValue] = None
     alias: Optional[int] = None
     is_historical: bool = False
@@ -62,6 +66,8 @@ def to_pb(self, include_dtype: bool) -> PB_Metric:  # type: ignore[reportInvalidTypeForm]
             metric.datatype = self.datatype
         if self.metadata is not None:
             metric.metadata.CopyFrom(self.metadata.to_pb())
+        if self.properties is not None:
+            metric.properties.CopyFrom(self.properties.to_pb())
         if self.alias is not None:
             metric.alias = self.alias
         if self.is_historical:
@@ -104,4 +110,5 @@ def from_pb(cls, metric: PB_Metric) -> Self:  # type: ignore[reportInvalidTypeForm]
             metadata=Metadata.from_pb(metric.metadata)
             if metric.HasField("metadata")
             else None,
+            properties=PropertySet.from_pb(metric.properties)
+            if metric.HasField("properties")
+            else None,
         )
diff --git a/src/pysparkplug/_properties.py b/src/pysparkplug/_properties.py
new file mode 100644
index 0000000..bde3ffc
--- /dev/null
+++ b/src/pysparkplug/_properties.py
@@ -0,0 +1,80 @@
+import dataclasses
+from typing import Optional, Union
+
+from pysparkplug._datatype import DataType
+from pysparkplug._protobuf import (
+    PropertySet as PB_PropertySet,
+)
+from pysparkplug._protobuf import (
+    PropertyValue as PB_PropertyValue,
+)
+from pysparkplug._types import Self
+
+__all__ = ["PropertySet", "PropertyValue"]
+
+
+@dataclasses.dataclass(frozen=True)
+class PropertyValue:
+    type: DataType
+    value: Optional[
+        Union[int, float, bool, str, "PropertySet", tuple["PropertySet", ...]]
+    ] = None
+    is_null: Optional[bool] = None
+
+    def to_pb(self, include_dtype: bool) -> PB_PropertyValue:  # type: ignore[reportInvalidTypeForm]
+        property_value = PB_PropertyValue()
+        if include_dtype:
+            property_value.type = self.type
+        if self.is_null or self.value is None:
+            property_value.is_null = True
+        elif isinstance(self.value, PropertySet):
+            # Protobuf message fields can't be assigned directly; copy in
+            property_value.propertyset_value.CopyFrom(self.value.to_pb())
+        elif isinstance(self.value, tuple):
+            property_value.propertysets_value.propertyset.extend(
+                value.to_pb() for value in self.value
+            )
+        else:
+            setattr(property_value, self.type.field, self.value)
+        return property_value
+
+    @classmethod
+    def from_pb(cls, property_value: PB_PropertyValue) -> Self:  # type: ignore[reportInvalidTypeForm]
+        value_type = (
+            property_value.type if property_value.HasField("type") else DataType.UNKNOWN
+        )
+        is_null = property_value.is_null if property_value.HasField("is_null") else None
+        if is_null:
+            value = None
+        else:
+            # WhichOneof returns the name of the set field, e.g. "int_value",
+            # "propertyset_value", or "propertysets_value", or None if unset
+            value_field = property_value.WhichOneof("value")
+            if value_field is None:
+                value = None
+            elif value_field == "propertyset_value":
+                value = PropertySet.from_pb(property_value.propertyset_value)
+            elif value_field == "propertysets_value":
+                value = tuple(
+                    PropertySet.from_pb(val)
+                    for val in property_value.propertysets_value.propertyset
+                )
+            else:
+                value = getattr(property_value, value_field)
+
+        return cls(type=value_type, is_null=is_null, value=value)
+
+
+@dataclasses.dataclass(frozen=True)
+class PropertySet:
+    keys: tuple[str, ...]
+    values: tuple[PropertyValue, ...]
+
+    def to_pb(self) -> PB_PropertySet:  # type: ignore[reportInvalidTypeForm]
+        property_set = PB_PropertySet()
+        property_set.keys.extend(self.keys)
+        property_set.values.extend(
+            value.to_pb(include_dtype=True) for value in self.values
+        )
+        return property_set
+
+    @classmethod
+    def from_pb(cls, property_set: PB_PropertySet) -> Self:  # type: ignore[reportInvalidTypeForm]
+        return cls(
+            keys=tuple(property_set.keys),
+            values=tuple(PropertyValue.from_pb(val) for val in property_set.values),
+        )
diff --git a/test/unit_tests/test_metric.py b/test/unit_tests/test_metric.py
index ffa9e7d..74a92b5 100644
--- a/test/unit_tests/test_metric.py
+++ b/test/unit_tests/test_metric.py
@@ -4,6 +4,7 @@
 from datetime import datetime, timezone
 
 from pysparkplug import DataType, Metadata, Metric
+from pysparkplug._properties import PropertySet, PropertyValue
 
 
 class TestMetric(unittest.TestCase):
@@ -66,7 +67,7 @@ def test_array_null_handling(self):
         else:
             self.assertEqual(metric2.value, value)
 
-    def test_metric_properties(self):
-        """Test metric property handling"""
+    def test_metric_flags(self):
+        """Test metric flag handling"""
         metric = Metric(
@@ -224,3 +225,26 @@ def test_metric_with_partial_metadata(self):
         self.assertIsNone(metric2.metadata.file_type)
         self.assertIsNone(metric2.metadata.md5)
         self.assertIsNone(metric2.metadata.description)
+
+    def test_metric_with_properties(self):
+        """Test metric with properties"""
+        metric = Metric(
+            timestamp=1234567890,
+            name="test_metric",
+            datatype=DataType.INT32,
+            value=42,
+            properties=PropertySet(
+                keys=("int",),
+                values=(PropertyValue(type=DataType.INT32, value=123),),
+            ),
+        )
+
+        pb = metric.to_pb(include_dtype=True)
+        self.assertEqual(len(pb.properties.keys), len(pb.properties.values))
+        self.assertEqual(["int"], pb.properties.keys)
+        self.assertEqual(123, pb.properties.values[0].int_value)
+
+        metric2 = Metric.from_pb(pb)
+        assert metric2.properties is not None
+        self.assertEqual(len(metric2.properties.keys), len(metric2.properties.values))
+        self.assertEqual(("int",), metric2.properties.keys)
+        self.assertEqual(123, metric2.properties.values[0].value)
diff --git a/test/unit_tests/test_properties.py b/test/unit_tests/test_properties.py
new file mode 100644
index 0000000..acc2172
--- /dev/null
+++ b/test/unit_tests/test_properties.py
@@ -0,0 +1,26 @@
+import unittest
+
+from pysparkplug._datatype import DataType
+from pysparkplug._properties import PropertySet, PropertyValue
+
+
+class TestProperties(unittest.TestCase):
+    def test_basic_property(self):
+        properties = PropertySet(
+            keys=("int",),
+            values=(PropertyValue(type=DataType.INT32, value=123),),
+        )
+        pb = properties.to_pb()
+        self.assertEqual(len(pb.keys), len(pb.values))
+        self.assertEqual(["int"], pb.keys)
+        self.assertEqual(123, pb.values[0].int_value)
+
+    def test_nulls(self):
+        properties = PropertySet(
+            keys=("anything",),
+            values=(PropertyValue(type=DataType.FLOAT, value=None),),
+        )
+        pb = properties.to_pb()
+        self.assertEqual(["anything"], pb.keys)
+        self.assertTrue(pb.values[0].is_null)
+        properties2 = PropertySet.from_pb(pb)
+        self.assertTrue(properties2.values[0].is_null)
+        self.assertIsNone(properties2.values[0].value)

From ba08d26ba2a7778954f66b0c3d91ca8aabdde9d0 Mon Sep 17 00:00:00 2001
From: Gavin Miller
Date: Mon, 24 Nov 2025 12:04:54 -0700
Subject: [PATCH 2/2] Add docstrings
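
Besides the docstrings, export PropertySet and PropertyValue from the
package root and rework _int_decoder to decode two's-complement values
by masking, where it previously raised OverflowError on out-of-range
input. A worked example of the new decoder (calls are illustrative):

    _int_decoder(0x7FFF, 16)  # sign bit clear -> returns 32767 unchanged
    _int_decoder(0xFFFE, 16)  # sign bit set   -> 0xFFFE - 0x10000 == -2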
---
 docs/conf.py                   |  2 ++
 src/pysparkplug/__init__.py    |  3 +++
 src/pysparkplug/_datatype.py   | 10 ++++++----
 src/pysparkplug/_properties.py | 22 ++++++++++++++++++++++
 4 files changed, 33 insertions(+), 4 deletions(-)

diff --git a/docs/conf.py b/docs/conf.py
index 5862385..98f2887 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -56,6 +56,8 @@
     ("py:class", "pysparkplug._strenum.StrEnum"),
     ("py:class", "sparkplug_b_pb2.Metric"),
     ("py:class", "sparkplug_b_pb2.MetaData"),
+    ("py:class", "sparkplug_b_pb2.PropertySet"),
+    ("py:class", "sparkplug_b_pb2.PropertyValue"),
     ("py:class", "ssl._SSLMethod"),
 ]
diff --git a/src/pysparkplug/__init__.py b/src/pysparkplug/__init__.py
index 709114d..4f15c12 100644
--- a/src/pysparkplug/__init__.py
+++ b/src/pysparkplug/__init__.py
@@ -28,6 +28,7 @@
     NDeath,
     State,
 )
+from pysparkplug._properties import PropertySet, PropertyValue
 from pysparkplug._time import get_current_timestamp
 from pysparkplug._topic import Topic
 from pysparkplug._version import __version__
@@ -57,6 +58,8 @@
     "NCmd",
     "NData",
     "NDeath",
+    "PropertySet",
+    "PropertyValue",
     "QoS",
     "State",
     "TLSConfig",
diff --git a/src/pysparkplug/_datatype.py b/src/pysparkplug/_datatype.py
index 0bd3c3d..31328e4 100644
--- a/src/pysparkplug/_datatype.py
+++ b/src/pysparkplug/_datatype.py
@@ -122,10 +122,12 @@ def _int_encoder(value: int, bits: int) -> int:
 
 
 def _int_decoder(value: int, bits: int) -> int:
-    max_val: int = 2**bits
-    if not 0 <= value < max_val:
-        raise OverflowError(f"Int{bits} overflow with value {value}")
-    return value - (max_val if value >= 2 ** (bits - 1) else 0)
+    mask = (1 << bits) - 1  # e.g. 0xFFFF for 16 bits
+    value &= mask  # drop any higher bits
+    sign_bit = 1 << (bits - 1)  # e.g. 0x8000 for 16 bits
+    if value & sign_bit:  # if the sign bit is set, sign-extend
+        value -= 1 << bits
+    return value
 
 
 def _encode_numeric_array(values: Sequence[float], format_char: str) -> bytes:
diff --git a/src/pysparkplug/_properties.py b/src/pysparkplug/_properties.py
index bde3ffc..14dc557 100644
--- a/src/pysparkplug/_properties.py
+++ b/src/pysparkplug/_properties.py
@@ -1,3 +1,5 @@
+"""Module defining the PropertySet and PropertyValue dataclasses"""
+
 import dataclasses
 from typing import Optional, Union
 
@@ -15,6 +17,17 @@
 
 @dataclasses.dataclass(frozen=True)
 class PropertyValue:
+    """Class representing a Sparkplug B property value
+
+    Args:
+        type:
+            datatype associated with this property value
+        value:
+            the value of the property
+        is_null:
+            if this is null, explicitly say so rather than using -1, False, etc.
+    """
+
     type: DataType
     value: Optional[
         Union[int, float, bool, str, "PropertySet", tuple["PropertySet", ...]]
     ] = None
     is_null: Optional[bool] = None
@@ -61,6 +74,15 @@
 
 @dataclasses.dataclass(frozen=True)
 class PropertySet:
+    """Class representing a set of Sparkplug B properties
+
+    Keys and values are parallel tuples: keys[i] names values[i].
+
+    Args:
+        keys:
+            names of the properties
+        values:
+            values of the properties
+    """
+
     keys: tuple[str, ...]
     values: tuple[PropertyValue, ...]