Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions docs/conf.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,8 @@
("py:class", "pysparkplug._strenum.StrEnum"),
("py:class", "sparkplug_b_pb2.Metric"),
("py:class", "sparkplug_b_pb2.MetaData"),
("py:class", "sparkplug_b_pb2.PropertySet"),
("py:class", "sparkplug_b_pb2.PropertyValue"),
("py:class", "ssl._SSLMethod"),
]

Expand Down
3 changes: 3 additions & 0 deletions src/pysparkplug/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
NDeath,
State,
)
from pysparkplug._properties import PropertySet, PropertyValue
from pysparkplug._time import get_current_timestamp
from pysparkplug._topic import Topic
from pysparkplug._version import __version__
Expand Down Expand Up @@ -57,6 +58,8 @@
"NCmd",
"NData",
"NDeath",
"PropertySet",
"PropertyValue",
"QoS",
"State",
"TLSConfig",
Expand Down
10 changes: 6 additions & 4 deletions src/pysparkplug/_datatype.py
Original file line number Diff line number Diff line change
Expand Up @@ -122,10 +122,12 @@ def _int_encoder(value: int, bits: int) -> int:


def _int_decoder(value: int, bits: int) -> int:
max_val: int = 2**bits
if not 0 <= value < max_val:
raise OverflowError(f"Int{bits} overflow with value {value}")
return value - (max_val if value >= 2 ** (bits - 1) else 0)
mask = (1 << bits) - 1 # e.g. 0xFFFF for 16 bits
value &= mask # drop any higher bits
sign_bit = 1 << (bits - 1) # e.g. 0x8000 for 16 bits
if value & sign_bit: # if sign bit is set
value -= 1 << bits
return value


def _encode_numeric_array(values: Sequence[float], format_char: str) -> bytes:
Expand Down
7 changes: 7 additions & 0 deletions src/pysparkplug/_metric.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@

from pysparkplug._datatype import DataType
from pysparkplug._metadata import Metadata
from pysparkplug._properties import PropertySet
from pysparkplug._protobuf import Metric as PB_Metric
from pysparkplug._types import MetricValue, Self

Expand Down Expand Up @@ -34,12 +35,15 @@ class Metric:
if this is null - explicitly say so rather than using -1, false, etc
metadata:
optional metadata for this metric
properties:
optional properties for this metric
"""

timestamp: Optional[int]
name: Optional[str]
datatype: DataType
metadata: Optional[Metadata] = None
properties: Optional[PropertySet] = None
value: Optional[MetricValue] = None
alias: Optional[int] = None
is_historical: bool = False
Expand All @@ -62,6 +66,8 @@ def to_pb(self, include_dtype: bool) -> PB_Metric: # type: ignore[reportInvalid
metric.datatype = self.datatype
if self.metadata is not None:
metric.metadata.CopyFrom(self.metadata.to_pb())
if self.properties is not None:
metric.properties.CopyFrom(self.properties.to_pb())
if self.alias is not None:
metric.alias = self.alias
if self.is_historical:
Expand Down Expand Up @@ -104,4 +110,5 @@ def from_pb(cls, metric: PB_Metric) -> Self: # type: ignore[reportInvalidTypeFo
metadata=Metadata.from_pb(metric.metadata)
if metric.HasField("metadata")
else None,
properties=PropertySet.from_pb(metric.properties)
if metric.HasField("properties")
else None,
)
102 changes: 102 additions & 0 deletions src/pysparkplug/_properties.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,102 @@
"""Module defining the PropertySet and PropertyValue dataclasses"""

import dataclasses
from typing import Optional, Union

from pysparkplug._datatype import DataType
from pysparkplug._protobuf import (
PropertySet as PB_PropertySet,
)
from pysparkplug._protobuf import (
PropertyValue as PB_PropertyValue,
)
from pysparkplug._types import Self

__all__ = ["PropertySet", "PropertyValue"]


@dataclasses.dataclass(frozen=True)
class PropertyValue:
    """Class representing a Sparkplug B property value

    Args:
        type:
            datatype associated with this property value
        value:
            the value of the property
        is_null:
            if this is null - explicitly say so rather than using -1, false, etc
    """

    type: DataType
    value: Optional[
        Union[int, float, bool, str, "PropertySet", tuple["PropertySet", ...]]
    ] = None
    is_null: Optional[bool] = None

    def to_pb(self, include_dtype: bool) -> PB_PropertyValue:  # type: ignore[reportInvalidTypeForm]
        """Convert this property value to its protobuf representation.

        Args:
            include_dtype:
                whether to encode the datatype on the wire
        """
        property_value = PB_PropertyValue()
        if include_dtype:
            property_value.type = self.type
        if self.is_null or self.value is None:
            property_value.is_null = True
        elif isinstance(self.value, PropertySet):
            # Message-typed protobuf fields cannot be assigned directly;
            # they must be populated via CopyFrom.
            property_value.propertyset_value.CopyFrom(self.value.to_pb())
        elif isinstance(self.value, tuple):
            # A PropertySetList message wraps a repeated `propertyset` field.
            property_value.propertysets_value.propertyset.extend(
                value.to_pb() for value in self.value
            )
        else:
            # Scalar values map onto the oneof field named by the datatype.
            setattr(property_value, self.type.field, self.value)
        return property_value

    @classmethod
    def from_pb(cls, property_value: PB_PropertyValue) -> Self:  # type: ignore[reportInvalidTypeForm]
        """Construct a PropertyValue from its protobuf representation."""
        value_type = (
            property_value.type if property_value.HasField("type") else DataType.UNKNOWN
        )
        is_null = property_value.is_null if property_value.HasField("is_null") else None
        if is_null:
            value = None
        else:
            # WhichOneof returns the *field name* of the populated oneof
            # member (e.g. "propertyset_value"), or None if nothing is set.
            value_field = property_value.WhichOneof("value")
            if value_field is None:
                value = None
            elif value_field == "propertyset_value":
                value = PropertySet.from_pb(property_value.propertyset_value)
            elif value_field == "propertysets_value":
                value = tuple(
                    PropertySet.from_pb(val)
                    for val in property_value.propertysets_value.propertyset
                )
            else:
                value = getattr(property_value, value_field)

        return cls(type=value_type, is_null=is_null, value=value)


@dataclasses.dataclass(frozen=True)
class PropertySet:
    """Class representing an array of Sparkplug B properties

    Args:
        keys:
            names of the properties
        values:
            values of the properties
    """

    keys: tuple[str, ...]
    values: tuple[PropertyValue, ...]

    def to_pb(self) -> PB_PropertySet:  # type: ignore[reportInvalidTypeForm]
        """Convert this property set to its protobuf representation."""
        pb = PB_PropertySet()
        pb.keys.extend(self.keys)
        for prop in self.values:
            # Only encode the datatype when one was actually provided
            pb.values.append(prop.to_pb(prop.type is not None))
        return pb

    @classmethod
    def from_pb(cls, property_set: PB_PropertySet) -> Self:  # type: ignore[reportInvalidTypeForm]
        """Construct a PropertySet from its protobuf representation."""
        decoded = [PropertyValue.from_pb(val) for val in property_set.values]
        return cls(keys=tuple(property_set.keys), values=tuple(decoded))
26 changes: 25 additions & 1 deletion test/unit_tests/test_metric.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
from datetime import datetime, timezone

from pysparkplug import DataType, Metadata, Metric
from pysparkplug import PropertySet, PropertyValue


class TestMetric(unittest.TestCase):
Expand Down Expand Up @@ -66,7 +67,7 @@ def test_array_null_handling(self):
else:
self.assertEqual(metric2.value, value)

def test_metric_properties(self):
def test_metric_flags(self):
"""Test metric property handling"""
metric = Metric(
timestamp=1234567890,
Expand Down Expand Up @@ -224,3 +225,26 @@ def test_metric_with_partial_metadata(self):
self.assertIsNone(metric2.metadata.file_type)
self.assertIsNone(metric2.metadata.md5)
self.assertIsNone(metric2.metadata.description)

def test_metric_with_properties(self):
"""Test metric with properties"""
metric = Metric(
timestamp=1234567890,
name="test_metric",
datatype=DataType.INT32,
value=42,
properties=PropertySet(
keys=("int",), values=(PropertyValue(type=DataType.INT32, value=123),)
),
)

pb = metric.to_pb(include_dtype=True)
self.assertEqual(len(pb.properties.keys), len(pb.properties.values))
self.assertEqual(["int"], pb.properties.keys)
self.assertEqual(123, pb.properties.values[0].int_value)

metric2 = metric.from_pb(pb)
assert metric2.properties is not None
self.assertEqual(len(metric2.properties.keys), len(metric2.properties.values))
self.assertEqual(("int",), metric2.properties.keys)
self.assertEqual(123, metric2.properties.values[0].value)
26 changes: 26 additions & 0 deletions test/unit_tests/test_properties.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
import unittest

from pysparkplug._datatype import DataType
from pysparkplug._properties import PropertySet, PropertyValue


class TestProperties(unittest.TestCase):
    """Round-trip tests for PropertySet/PropertyValue protobuf encoding."""

    def test_basic_property(self):
        """An INT32 property encodes into parallel keys/values arrays."""
        prop_set = PropertySet(
            keys=("int",),
            values=(PropertyValue(type=DataType.INT32, value=123),),
        )
        encoded = prop_set.to_pb()
        self.assertEqual(len(encoded.keys), len(encoded.values))
        self.assertEqual(["int"], encoded.keys)
        self.assertEqual(123, encoded.values[0].int_value)

    def test_nulls(self):
        """A None value encodes as is_null and decodes back to None."""
        prop_set = PropertySet(
            keys=("anything",),
            values=(PropertyValue(type=DataType.FLOAT, value=None),),
        )
        encoded = prop_set.to_pb()
        self.assertEqual(["anything"], encoded.keys)
        self.assertEqual(True, encoded.values[0].is_null)
        decoded = PropertySet.from_pb(encoded)
        self.assertEqual(True, decoded.values[0].is_null)
        self.assertEqual(None, decoded.values[0].value)