Compare commits
5 Commits: max_visual...use_proto

| Author | SHA1 | Date |
|---|---|---|
| | 3d53f00ec9 | |
| | 3a15bd655e | |
| | 65cb8c201d | |
| | 1d1170f19c | |
| | 884a575d7d | |
.gitignore (vendored, 1 change)

@@ -176,3 +176,4 @@ cython_debug/
 # user
 .DS_Store
 *.parquet
+*.png
2025-06-10_16_26.CSV (new file, 135 lines)

@@ -0,0 +1,135 @@
time,hr
00:00:00,113
00:00:00,111
00:00:01,113
00:00:02,114
00:00:03,113
00:00:04,114
00:00:05,113
00:00:06,114
00:00:07,114
00:00:08,114
00:00:09,114
00:00:10,114
00:00:11,114
00:00:12,114
00:00:13,114
00:00:14,114
00:00:15,114
00:00:16,114
00:00:17,113
00:00:18,113
00:00:19,113
00:00:20,112
00:00:21,112
00:00:22,112
00:00:23,113
00:00:24,113
00:00:25,114
00:00:26,115
00:00:27,115
00:00:28,116
00:00:29,117
00:00:30,118
00:00:31,118
00:00:32,119
00:00:33,119
00:00:34,120
00:00:35,121
00:00:36,121
00:00:37,121
00:00:38,121
00:00:39,121
00:00:40,122
00:00:41,122
00:00:42,122
00:00:43,122
00:00:44,122
00:00:45,122
00:00:46,122
00:00:47,121
00:00:48,122
00:00:49,122
00:00:50,122
00:00:51,122
00:00:52,122
00:00:53,122
00:00:54,122
00:00:55,122
00:00:56,123
00:00:57,123
00:00:58,123
00:00:59,123
00:01:00,123
00:01:01,124
00:01:02,124
00:01:03,124
00:01:04,124
00:01:05,124
00:01:06,125
00:01:07,125
00:01:08,126
00:01:09,127
00:01:10,128
00:01:11,129
00:01:12,129
00:01:13,131
00:01:14,132
00:01:15,133
00:01:16,135
00:01:17,137
00:01:18,139
00:01:19,140
00:01:20,140
00:01:21,141
00:01:22,141
00:01:23,141
00:01:24,142
00:01:25,142
00:01:26,142
00:01:27,141
00:01:28,141
00:01:29,141
00:01:30,140
00:01:31,140
00:01:32,139
00:01:33,139
00:01:34,139
00:01:35,139
00:01:36,138
00:01:37,138
00:01:38,138
00:01:39,137
00:01:40,136
00:01:41,135
00:01:42,134
00:01:43,133
00:01:44,131
00:01:45,131
00:01:46,131
00:01:47,130
00:01:48,129
00:01:49,129
00:01:50,129
00:01:51,129
00:01:52,128
00:01:53,128
00:01:54,127
00:01:55,127
00:01:56,126
00:01:57,126
00:01:58,126
00:01:59,125
00:02:00,125
00:02:01,124
00:02:02,124
00:02:03,124
00:02:04,124
00:02:05,123
00:02:06,123
00:02:07,123
00:02:08,124
00:02:09,124
00:02:10,124
00:02:11,125
00:02:12,125
app/proto/__init__.py (new file, 221 lines)

@@ -0,0 +1,221 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# sources: nanopb.proto
# plugin: python-betterproto
# This file has been @generated

from dataclasses import dataclass
from typing import List

import betterproto
import betterproto.lib.google.protobuf as betterproto_lib_google_protobuf


class FieldType(betterproto.Enum):
    FT_DEFAULT = 0
    FT_CALLBACK = 1
    FT_POINTER = 4
    FT_STATIC = 2
    FT_IGNORE = 3
    FT_INLINE = 5


class IntSize(betterproto.Enum):
    IS_DEFAULT = 0
    IS_8 = 8
    IS_16 = 16
    IS_32 = 32
    IS_64 = 64


class TypenameMangling(betterproto.Enum):
    M_NONE = 0
    M_STRIP_PACKAGE = 1
    M_FLATTEN = 2
    M_PACKAGE_INITIALS = 3


class DescriptorSize(betterproto.Enum):
    DS_AUTO = 0
    DS_1 = 1
    DS_2 = 2
    DS_4 = 4
    DS_8 = 8


@dataclass(eq=False, repr=False)
class NanoPbOptions(betterproto.Message):
    """
    This is the inner options message, which basically defines options for
    a field. When it is used in message or file scope, it applies to all
    fields.
    """

    max_size: int = betterproto.int32_field(1)
    """
    Allocated size for 'bytes' and 'string' fields.
    For string fields, this should include the space for null terminator.
    """

    max_length: int = betterproto.int32_field(14)
    """
    Maximum length for 'string' fields. Setting this is equivalent
    to setting max_size to a value of length+1.
    """

    max_count: int = betterproto.int32_field(2)
    """Allocated number of entries in arrays ('repeated' fields)"""

    int_size: "IntSize" = betterproto.enum_field(7)
    """
    Size of integer fields. Can save some memory if you don't need
    full 32 bits for the value.
    """

    enum_intsize: "IntSize" = betterproto.enum_field(34)
    """Size for enum fields. Supported by C++11 and C23 standards."""

    type: "FieldType" = betterproto.enum_field(3)
    """Force type of field (callback or static allocation)"""

    long_names: bool = betterproto.bool_field(4)
    """Use long names for enums, i.e. EnumName_EnumValue."""

    packed_struct: bool = betterproto.bool_field(5)
    """
    Add 'packed' attribute to generated structs.
    Note: this cannot be used on CPUs that break on unaligned
    accesses to variables.
    """

    packed_enum: bool = betterproto.bool_field(10)
    """Add 'packed' attribute to generated enums."""

    skip_message: bool = betterproto.bool_field(6)
    """Skip this message"""

    no_unions: bool = betterproto.bool_field(8)
    """Generate oneof fields as normal optional fields instead of union."""

    msgid: int = betterproto.uint32_field(9)
    """integer type tag for a message"""

    anonymous_oneof: bool = betterproto.bool_field(11)
    """decode oneof as anonymous union"""

    proto3: bool = betterproto.bool_field(12)
    """Proto3 singular field does not generate a "has_" flag"""

    proto3_singular_msgs: bool = betterproto.bool_field(21)
    """
    Force proto3 messages to have no "has_" flag.
    This was default behavior until nanopb-0.4.0.
    """

    enum_to_string: bool = betterproto.bool_field(13)
    """
    Generate an enum->string mapping function (can take up lots of space).
    """

    enum_validate: bool = betterproto.bool_field(32)
    """Generate validation methods for enums"""

    fixed_length: bool = betterproto.bool_field(15)
    """Generate bytes arrays with fixed length"""

    fixed_count: bool = betterproto.bool_field(16)
    """Generate repeated field with fixed count"""

    submsg_callback: bool = betterproto.bool_field(22)
    """
    Generate message-level callback that is called before decoding submessages.
    This can be used to set callback fields for submsgs inside oneofs.
    """

    mangle_names: "TypenameMangling" = betterproto.enum_field(17)
    """
    Shorten or remove package names from type names.
    This option applies only on the file level.
    """

    callback_datatype: str = betterproto.string_field(18)
    """Data type for storage associated with callback fields."""

    callback_function: str = betterproto.string_field(19)
    """
    Callback function used for encoding and decoding.
    Prior to nanopb-0.4.0, the callback was specified in per-field pb_callback_t
    structure. This is still supported, but does not work inside e.g. oneof or pointer
    fields. Instead, a new method allows specifying a per-message callback that
    will be called for all callback fields in a message type.
    """

    descriptorsize: "DescriptorSize" = betterproto.enum_field(20)
    """
    Select the size of field descriptors. This option has to be defined
    for the whole message, not per-field. Usually automatic selection is
    ok, but if it results in compilation errors you can increase the field
    size here.
    """

    default_has: bool = betterproto.bool_field(23)
    """Set default value for has_ fields."""

    include: List[str] = betterproto.string_field(24)
    """Extra files to include in generated `.pb.h`"""

    exclude: List[str] = betterproto.string_field(26)
    """
    Automatic includes to exclude from generated `.pb.h`
    Same as nanopb_generator.py command line flag -x.
    """

    package: str = betterproto.string_field(25)
    """Package name that applies only for nanopb."""

    type_override: "betterproto_lib_google_protobuf.FieldDescriptorProtoType" = (
        betterproto.enum_field(27)
    )
    """
    Override type of the field in generated C code. Only to be used with related field types
    """

    label_override: "betterproto_lib_google_protobuf.FieldDescriptorProtoLabel" = (
        betterproto.enum_field(31)
    )
    """
    Override of the label of the field (see FieldDescriptorProto.Label). Can be used to create
    fields which nanopb considers required in proto3, or whether nanopb treats the field as
    optional/required/repeated.
    """

    sort_by_tag: bool = betterproto.bool_field(28)
    """
    Due to historical reasons, nanopb orders fields in structs by their tag number
    instead of the order in .proto. Set this to false to keep the .proto order.
    The default value will probably change to false in nanopb-0.5.0.
    """

    fallback_type: "FieldType" = betterproto.enum_field(29)
    """
    Set the FT_DEFAULT field conversion strategy.
    A field that can become a static member of a c struct (e.g. int, bool, etc)
    will be a a static field.
    Fields with dynamic length are converted to either a pointer or a callback.
    """

    initializer: str = betterproto.string_field(30)
    """
    Override initializer used in generated MyMessage_init_zero and MyMessage_init_default macros
    By default decided automatically based on field default value and datatype.
    """

    discard_unused_automatic_types: bool = betterproto.bool_field(33)
    """
    Discard unused types that are automatically generated by protoc if they are not actually
    needed. Currently this applies to map< > types when the field is ignored by options.
    """

    discard_deprecated: bool = betterproto.bool_field(35)
    """
    Discard messages and fields marked with [deprecated = true] in the proto file.
    """
app/proto/hr_packet/__init__.py (new file, 96 lines)

@@ -0,0 +1,96 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# sources: hr_packet.proto
# plugin: python-betterproto
# This file has been @generated

from dataclasses import dataclass
from typing import List

import betterproto


class HrConfidence(betterproto.Enum):
    ZERO = 0
    """[0,25)"""

    LOW = 1
    """[25,50)"""

    MEDIUM = 2
    """[50,75]"""

    HIGH = 3
    """(75,100]"""


class LoRaBw(betterproto.Enum):
    BW_NONE = 0
    BW_10_4 = 8
    """
    nobody is using 7.8 kHz bandwidth
    so to satisfy protobuf, we use NONE for zero value
    """

    BW_15_6 = 1
    BW_20_8 = 9
    BW_31_25 = 2
    BW_41_7 = 10
    BW_62_5 = 3
    BW_125_0 = 4
    BW_250_0 = 5
    BW_500_0 = 6


@dataclass(eq=False, repr=False)
class LoRaParameters(betterproto.Message):
    bw: "LoRaBw" = betterproto.enum_field(1)
    sf: int = betterproto.int32_field(2)
    frequency: float = betterproto.float_field(3)


@dataclass(eq=False, repr=False)
class StatusFlag(betterproto.Message):
    hr_confidence: "HrConfidence" = betterproto.enum_field(1)
    is_active: bool = betterproto.bool_field(2)
    is_on_skin: bool = betterproto.bool_field(3)
    battery: int = betterproto.uint32_field(4)


@dataclass(eq=False, repr=False)
class HrOnlyPacket(betterproto.Message):
    status: "StatusFlag" = betterproto.message_field(1)
    id: int = betterproto.uint32_field(2)
    packet_num: int = betterproto.uint32_field(3)
    hr: int = betterproto.uint32_field(4)


@dataclass(eq=False, repr=False)
class HrPpgPacket(betterproto.Message):
    status: "StatusFlag" = betterproto.message_field(1)
    id: int = betterproto.uint32_field(2)
    packet_num: int = betterproto.uint32_field(3)
    hr: int = betterproto.uint32_field(4)
    ppg_data: List[int] = betterproto.uint32_field(5)


@dataclass(eq=False, repr=False)
class PacketStatus(betterproto.Message):
    signal_rssi_pkt: int = betterproto.sint32_field(1)
    """
    Estimation of RSSI of the LoRa® signal (after despreading) on last packet received.
    """


@dataclass(eq=False, repr=False)
class GatewayInfo(betterproto.Message):
    region_id: int = betterproto.uint32_field(1)
    gateway_mac: bytes = betterproto.bytes_field(2)
    radio_parameters: "LoRaParameters" = betterproto.message_field(3)


@dataclass(eq=False, repr=False)
class HrPacket(betterproto.Message):
    gateway_info: "GatewayInfo" = betterproto.message_field(1)
    hr_only_packet: "HrOnlyPacket" = betterproto.message_field(2, group="packet")
    hr_ppg_packet: "HrPpgPacket" = betterproto.message_field(3, group="packet")
    packet_status: "PacketStatus" = betterproto.message_field(4)
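A minimal round trip with these generated bindings might look like the sketch below. All field values here are invented; `bytes()`, `.parse()` and `betterproto.which_one_of` are the standard betterproto entry points for serializing, deserializing, and inspecting the `packet` oneof.

```python
import betterproto

from app.proto.hr_packet import GatewayInfo, HrOnlyPacket, HrPacket, StatusFlag

# Made-up values, just to exercise the oneof handling.
pkt = HrPacket(
    gateway_info=GatewayInfo(region_id=1, gateway_mac=b"\xaa\xbb\xcc\xdd\xee\xff"),
    hr_only_packet=HrOnlyPacket(
        status=StatusFlag(is_active=True, is_on_skin=True, battery=90),
        id=7,
        packet_num=1,
        hr=72,
    ),
)

wire = bytes(pkt)                 # serialize to protobuf wire format
decoded = HrPacket().parse(wire)  # parse() fills the instance and returns it
name, value = betterproto.which_one_of(decoded, "packet")
assert name == "hr_only_packet" and value.hr == 72
```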
@@ -6,11 +6,11 @@ from typing import Final, Optional
 from loguru import logger
 from anyio import create_udp_socket, create_connected_udp_socket

-DEVICE_NAME: Final[str] = "MAX-HUB"
+DEVICE_NAME: Final[str] = "Polar Sense E4E71028"
 UDP_SERVER_HOST: Final[str] = "localhost"
 UDP_SERVER_PORT: Final[int] = 50_000
 BLE_HR_SERVICE_UUID: Final[str] = "180D"
-BLE_HR_CHARACTERISTIC_RAW_UUID: Final[str] = "ff241160-8a02-4626-b499-b1572d2b5a29"
+BLE_HR_MEASUREMENT_CHARACTERISTIC_UUID: Final[str] = "2A37"


 async def main():
@@ -45,16 +45,16 @@ async def main():
             raise ValueError(f"Characteristic not found: {char_uuid}")
         return char

-    hr_raw_char = await find_char(
-        BLE_HR_SERVICE_UUID, BLE_HR_CHARACTERISTIC_RAW_UUID
+    hr_measurement_char = await find_char(
+        BLE_HR_SERVICE_UUID, BLE_HR_MEASUREMENT_CHARACTERISTIC_UUID
     )

     async def on_hr_data(char: BleakGATTCharacteristic, data: bytearray):
-        logger.info("raw={}", data.hex())
+        logger.info("hr_measurement={}", data.hex())
         await udp.send(data)

     logger.info("Starting notify")
-    await client.start_notify(hr_raw_char, on_hr_data)
+    await client.start_notify(hr_measurement_char, on_hr_data)
     ev = anyio.Event()
     await ev.wait()
components/app_proto/proto/hr_packet.proto (new file, 86 lines)

@@ -0,0 +1,86 @@
// See `Generator Options` section in
// https://jpa.kapsi.fi/nanopb/docs/reference.html#generator-options
// for nanopb specific options
//
// Remember to include
// https://github.com/nanopb/nanopb/blob/master/generator/proto/nanopb.proto
// when generating the proto file

syntax = "proto3";
import "nanopb.proto";

package hr_packet;

enum HrConfidence {
  // [0,25)
  ZERO = 0;
  // [25,50)
  LOW = 1;
  // [50,75]
  MEDIUM = 2;
  // (75,100]
  HIGH = 3;
}

enum LoRaBW {
  BW_NONE = 0;
  // nobody is using 7.8 kHz bandwidth
  // so to satisfy protobuf, we use NONE for zero value
  BW_10_4 = 0x08;
  BW_15_6 = 0x01;
  BW_20_8 = 0x09;
  BW_31_25 = 0x02;
  BW_41_7 = 0x0A;
  BW_62_5 = 0x03;
  BW_125_0 = 0x04;
  BW_250_0 = 0x05;
  BW_500_0 = 0x06;
}

message LoRaParameters {
  LoRaBW bw = 1;
  int32 sf = 2 [(nanopb).int_size = IS_8];
  float frequency = 3;
}

message StatusFlag {
  HrConfidence hr_confidence = 1 [(nanopb).int_size = IS_8];
  bool is_active = 2;
  bool is_on_skin = 3;
  uint32 battery = 4 [(nanopb).int_size = IS_8];
}

message HrOnlyPacket {
  StatusFlag status = 1 [(nanopb).int_size = IS_8];
  uint32 id = 2 [(nanopb).int_size = IS_8];
  uint32 packet_num = 3 [(nanopb).int_size = IS_8];
  uint32 hr = 4 [(nanopb).int_size = IS_8];
}

message HrPpgPacket {
  StatusFlag status = 1 [(nanopb).int_size = IS_8];
  uint32 id = 2 [(nanopb).int_size = IS_8];
  uint32 packet_num = 3 [(nanopb).int_size = IS_8];
  uint32 hr = 4 [(nanopb).int_size = IS_8];
  repeated uint32 ppg_data = 5 [(nanopb).max_count = 36];
}

message PacketStatus {
  // Estimation of RSSI of the LoRa® signal (after despreading) on last packet received.
  sint32 signal_rssi_pkt = 1;
}

message GatewayInfo {
  uint32 region_id = 1 [(nanopb).int_size = IS_8];
  bytes gateway_mac = 2 [(nanopb).max_size = 6];
  LoRaParameters radio_parameters = 3;
}

message HrPacket {
  GatewayInfo gateway_info = 1;
  oneof packet {
    HrOnlyPacket hr_only_packet = 2;
    HrPpgPacket hr_ppg_packet = 3;
  }
  PacketStatus packet_status = 4;
}
components/app_proto/proto/nanopb.proto (new file, 213 lines)

@@ -0,0 +1,213 @@
// This file contains definitions of custom options used to control the
// code generator in nanopb protocol buffers library.
//
// Most commonly used options are max_count and max_size, which allow
// the generator to allocate static arrays for repeated and string fields.
//
// There are three ways to use these options:
// 1. Use a separate <protofile>.options file
// 2. Use command line switches to nanopb_generator.py
// 3. Use [(nanopb).option = value] in your <protofile>.proto file
//
// For detailed documentation, refer to "Generator options" in docs/reference.md

syntax = "proto2";
import "google/protobuf/descriptor.proto";

option java_package = "fi.kapsi.koti.jpa.nanopb";

enum FieldType {
    FT_DEFAULT = 0; // Automatically decide field type, generate static field if possible.
    FT_CALLBACK = 1; // Always generate a callback field.
    FT_POINTER = 4; // Always generate a dynamically allocated field.
    FT_STATIC = 2; // Generate a static field or raise an exception if not possible.
    FT_IGNORE = 3; // Ignore the field completely.
    FT_INLINE = 5; // Legacy option, use the separate 'fixed_length' option instead
}

enum IntSize {
    IS_DEFAULT = 0; // Default, 32/64bit based on type in .proto
    IS_8 = 8;
    IS_16 = 16;
    IS_32 = 32;
    IS_64 = 64;
}

enum TypenameMangling {
    M_NONE = 0; // Default, no typename mangling
    M_STRIP_PACKAGE = 1; // Strip current package name
    M_FLATTEN = 2; // Only use last path component
    M_PACKAGE_INITIALS = 3; // Replace the package name by the initials
}

enum DescriptorSize {
    DS_AUTO = 0; // Select minimal size based on field type
    DS_1 = 1; // 1 word; up to 15 byte fields, no arrays
    DS_2 = 2; // 2 words; up to 4095 byte fields, 4095 entry arrays
    DS_4 = 4; // 4 words; up to 2^32-1 byte fields, 2^16-1 entry arrays
    DS_8 = 8; // 8 words; up to 2^32-1 entry arrays
}

// This is the inner options message, which basically defines options for
// a field. When it is used in message or file scope, it applies to all
// fields.
message NanoPBOptions {
  // Allocated size for 'bytes' and 'string' fields.
  // For string fields, this should include the space for null terminator.
  optional int32 max_size = 1;

  // Maximum length for 'string' fields. Setting this is equivalent
  // to setting max_size to a value of length+1.
  optional int32 max_length = 14;

  // Allocated number of entries in arrays ('repeated' fields)
  optional int32 max_count = 2;

  // Size of integer fields. Can save some memory if you don't need
  // full 32 bits for the value.
  optional IntSize int_size = 7 [default = IS_DEFAULT];

  // Size for enum fields. Supported by C++11 and C23 standards.
  optional IntSize enum_intsize = 34 [default = IS_DEFAULT];

  // Force type of field (callback or static allocation)
  optional FieldType type = 3 [default = FT_DEFAULT];

  // Use long names for enums, i.e. EnumName_EnumValue.
  optional bool long_names = 4 [default = true];

  // Add 'packed' attribute to generated structs.
  // Note: this cannot be used on CPUs that break on unaligned
  // accesses to variables.
  optional bool packed_struct = 5 [default = false];

  // Add 'packed' attribute to generated enums.
  optional bool packed_enum = 10 [default = false];

  // Skip this message
  optional bool skip_message = 6 [default = false];

  // Generate oneof fields as normal optional fields instead of union.
  optional bool no_unions = 8 [default = false];

  // integer type tag for a message
  optional uint32 msgid = 9;

  // decode oneof as anonymous union
  optional bool anonymous_oneof = 11 [default = false];

  // Proto3 singular field does not generate a "has_" flag
  optional bool proto3 = 12 [default = false];

  // Force proto3 messages to have no "has_" flag.
  // This was default behavior until nanopb-0.4.0.
  optional bool proto3_singular_msgs = 21 [default = false];

  // Generate an enum->string mapping function (can take up lots of space).
  optional bool enum_to_string = 13 [default = false];

  // Generate validation methods for enums
  optional bool enum_validate = 32 [default = false];

  // Generate bytes arrays with fixed length
  optional bool fixed_length = 15 [default = false];

  // Generate repeated field with fixed count
  optional bool fixed_count = 16 [default = false];

  // Generate message-level callback that is called before decoding submessages.
  // This can be used to set callback fields for submsgs inside oneofs.
  optional bool submsg_callback = 22 [default = false];

  // Shorten or remove package names from type names.
  // This option applies only on the file level.
  optional TypenameMangling mangle_names = 17 [default = M_NONE];

  // Data type for storage associated with callback fields.
  optional string callback_datatype = 18 [default = "pb_callback_t"];

  // Callback function used for encoding and decoding.
  // Prior to nanopb-0.4.0, the callback was specified in per-field pb_callback_t
  // structure. This is still supported, but does not work inside e.g. oneof or pointer
  // fields. Instead, a new method allows specifying a per-message callback that
  // will be called for all callback fields in a message type.
  optional string callback_function = 19 [default = "pb_default_field_callback"];

  // Select the size of field descriptors. This option has to be defined
  // for the whole message, not per-field. Usually automatic selection is
  // ok, but if it results in compilation errors you can increase the field
  // size here.
  optional DescriptorSize descriptorsize = 20 [default = DS_AUTO];

  // Set default value for has_ fields.
  optional bool default_has = 23 [default = false];

  // Extra files to include in generated `.pb.h`
  repeated string include = 24;

  // Automatic includes to exclude from generated `.pb.h`
  // Same as nanopb_generator.py command line flag -x.
  repeated string exclude = 26;

  // Package name that applies only for nanopb.
  optional string package = 25;

  // Override type of the field in generated C code. Only to be used with related field types
  optional google.protobuf.FieldDescriptorProto.Type type_override = 27;

  // Override of the label of the field (see FieldDescriptorProto.Label). Can be used to create
  // fields which nanopb considers required in proto3, or whether nanopb treats the field as
  // optional/required/repeated.
  optional google.protobuf.FieldDescriptorProto.Label label_override = 31;

  // Due to historical reasons, nanopb orders fields in structs by their tag number
  // instead of the order in .proto. Set this to false to keep the .proto order.
  // The default value will probably change to false in nanopb-0.5.0.
  optional bool sort_by_tag = 28 [default = true];

  // Set the FT_DEFAULT field conversion strategy.
  // A field that can become a static member of a c struct (e.g. int, bool, etc)
  // will be a a static field.
  // Fields with dynamic length are converted to either a pointer or a callback.
  optional FieldType fallback_type = 29 [default = FT_CALLBACK];

  // Override initializer used in generated MyMessage_init_zero and MyMessage_init_default macros
  // By default decided automatically based on field default value and datatype.
  optional string initializer = 30;

  // Discard unused types that are automatically generated by protoc if they are not actually
  // needed. Currently this applies to map< > types when the field is ignored by options.
  optional bool discard_unused_automatic_types = 33 [default = true];

  // Discard messages and fields marked with [deprecated = true] in the proto file.
  optional bool discard_deprecated = 35 [default = false];
}

// Extensions to protoc 'Descriptor' type in order to define options
// inside a .proto file.
//
// Protocol Buffers extension number registry
// --------------------------------
// Project:  Nanopb
// Contact:  Petteri Aimonen <jpa@kapsi.fi>
// Web site: http://kapsi.fi/~jpa/nanopb
// Extensions: 1010 (all types)
// --------------------------------

extend google.protobuf.FileOptions {
    optional NanoPBOptions nanopb_fileopt = 1010;
}

extend google.protobuf.MessageOptions {
    optional NanoPBOptions nanopb_msgopt = 1010;
}

extend google.protobuf.EnumOptions {
    optional NanoPBOptions nanopb_enumopt = 1010;
}

extend google.protobuf.FieldOptions {
    optional NanoPBOptions nanopb = 1010;
}
convert_parquet_to_csv.py (new file, 120 lines)

@@ -0,0 +1,120 @@
#!/usr/bin/env python3
"""
Convert parquet file to CSV with only HR field
"""

import pyarrow.parquet as pq
import pyarrow as pa
import pandas as pd
import sys


def convert_parquet_to_csv(parquet_file, output_file):
    """Convert parquet file to CSV extracting only HR data"""
    try:
        # Read the parquet file
        print(f"Reading {parquet_file}...")
        pf = pq.ParquetFile(parquet_file)

        print("Schema:")
        print(pf.schema)

        # Try to read just specific columns
        try:
            # Read the table with specific columns
            table = pf.read(columns=["hr_data"])
            print("Successfully read hr_data column")

            # Convert to pandas, handling the nested structure carefully
            df = table.to_pandas()
            print("Converted to pandas DataFrame")

        except Exception as e:
            print(f"Error reading with pandas conversion: {e}")
            # Try alternative approach - read raw pyarrow table
            table = pf.read()
            print("Read raw table successfully")

            # Get hr_data column directly from pyarrow
            hr_data_column = table.column("hr_data")
            print(f"HR data column type: {hr_data_column.type}")

            # Convert the column to a list format
            hr_data_values = []
            for i in range(len(hr_data_column)):
                chunk = hr_data_column.chunk(0)
                list_array = chunk.slice(i, 1).to_pandas().iloc[0]
                if list_array is not None and len(list_array) > 0:
                    hr_data_values.extend(list_array)

            if hr_data_values:
                # Create DataFrame with HR data
                hr_df = pd.DataFrame({"HR": hr_data_values})

                print(f"\nExtracted {len(hr_data_values)} HR values")
                print("Sample HR values:")
                print(hr_df.head(10))

                # Save to CSV
                hr_df.to_csv(output_file, index=False)
                print(f"\nSaved HR data to {output_file}")
                return True
            else:
                print("No HR data found")
                return False

        # If we got here, the pandas conversion worked
        print("Columns available:")
        print(df.columns.tolist())
        print("\nData shape:", df.shape)

        # Extract HR data - assuming it's in hr_data column
        if "hr_data" in df.columns:
            # Handle nested list structure
            hr_values = []
            for row_idx in range(len(df)):
                hr_data = df["hr_data"].iloc[row_idx]
                if hr_data is not None and len(hr_data) > 0:
                    hr_values.extend(hr_data)

            if hr_values:
                # Create a new DataFrame with HR data
                hr_df = pd.DataFrame({"HR": hr_values})

                print(f"\nExtracted {len(hr_values)} HR values")
                print("Sample HR values:")
                print(hr_df.head(10))

                # Save to CSV
                hr_df.to_csv(output_file, index=False)
                print(f"\nSaved HR data to {output_file}")
                return True
            else:
                print("No HR values found in the data")
                return False
        else:
            print("Error: 'hr_data' column not found in the data")
            print("Available columns:", df.columns.tolist())
            return False

    except Exception as e:
        print(f"Error: {e}")
        import traceback

        traceback.print_exc()
        return False


if __name__ == "__main__":
    parquet_file = "history_20250610_165414.parquet"
    output_file = "history_20250610_165414_HR.csv"

    success = convert_parquet_to_csv(parquet_file, output_file)

    if success:
        print(f"\nConversion completed successfully!")
        print(f"Input: {parquet_file}")
        print(f"Output: {output_file}")
    else:
        print("Conversion failed!")
        sys.exit(1)
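For comparison, a much shorter pandas-only sketch of the same extraction, assuming pandas can read the parquet file directly and the `hr_data` column arrives as nested lists (which the script above handles manually):

```python
import pandas as pd

# Minimal sketch, assuming a list-typed `hr_data` column readable by pandas.
df = pd.read_parquet("history_20250610_165414.parquet", columns=["hr_data"])
hr = df["hr_data"].explode().dropna().astype(float).rename("HR")
hr.to_csv("history_20250610_165414_HR.csv", index=False)
```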
gen_client.sh (new executable file, 7 lines)

@@ -0,0 +1,7 @@
#!/usr/bin/env bash
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
PROJECT_ROOT=$SCRIPT_DIR
APP_PROTO_DIR=$PROJECT_ROOT/components/app_proto/proto
OUTPUT_DIR=$SCRIPT_DIR/app/proto

uv run python -m grpc_tools.protoc --python_betterproto_out=$OUTPUT_DIR -I $APP_PROTO_DIR $APP_PROTO_DIR/hr_packet.proto
history_20250610_165414_HR.csv (new file, 89 lines)

@@ -0,0 +1,89 @@
HR
120.0
120.0
120.0
121.0
121.0
121.0
122.0
124.0
124.0
124.0
125.0
125.0
125.0
125.0
124.0
124.0
125.0
126.0
127.0
129.0
130.0
131.0
132.0
134.0
135.0
137.0
138.0
140.0
141.0
143.0
144.0
144.0
145.0
146.0
147.0
147.0
147.0
145.0
144.0
143.0
142.0
140.0
139.0
138.0
138.0
138.0
138.0
138.0
138.0
137.0
135.0
134.0
132.0
130.0
129.0
128.0
127.0
127.0
127.0
127.0
127.0
128.0
128.0
129.0
129.0
129.0
129.0
128.0
127.0
127.0
125.0
124.0
124.0
123.0
123.0
123.0
123.0
124.0
124.0
125.0
125.0
126.0
127.0
127.0
128.0
128.0
128.0
129.0
main.py (359 lines changed)

@@ -11,6 +11,8 @@ from typing import (
     TypedDict,
     Any,
     cast,
+    Dict,
+    Union,
 )

 from loguru import logger
@@ -25,33 +27,47 @@ from threading import Thread
 from time import sleep
 from pydantic import BaseModel, computed_field
 from datetime import datetime, timedelta
+import struct
 import awkward as ak
 from awkward import Array as AwkwardArray, Record as AwkwardRecord
-from app.model import AlgoReport, HrPacket, hr_confidence_to_num
+from app.model import AlgoReport
 from app.utils import Instant
 from collections import deque
 from dataclasses import dataclass
+from aiomqtt import Client as MqttClient, Message as MqttMessage
+from app.proto.hr_packet import HrPacket, HrOnlyPacket, HrPpgPacket, HrConfidence
+import betterproto


-class AppHistory(TypedDict):
+MQTT_BROKER: Final[str] = "weihua-iot.cn"
+MQTT_BROKER_PORT: Final[int] = 1883
+MAX_LENGTH = 600
+TOPIC: Final[str] = "/hr/region/1/band/#"
+UDP_DEVICE_ID: Final[int] = 0xFF
+
+NDArray = np.ndarray
+T = TypeVar("T")
+
+
+class DeviceHistory(TypedDict):
     timescape: deque[datetime]
     hr_data: deque[float]
-    hr_conf: deque[float]  # in %
-    accel_x_data: deque[int]
-    accel_y_data: deque[int]
-    accel_z_data: deque[int]
-    pd_data: deque[int]


 # https://handmadesoftware.medium.com/streamlit-asyncio-and-mongodb-f85f77aea825
 class AppState(TypedDict):
     worker_thread: Thread
-    message_queue: MemoryObjectReceiveStream[bytes]
+    message_queue: MemoryObjectReceiveStream[Tuple[datetime, bytes]]
+    mqtt_message_queue: MemoryObjectReceiveStream[Tuple[datetime, MqttMessage]]
     task_group: TaskGroup
-    history: AppHistory
-    refresh_inst: Instant
+    device_histories: Dict[Union[int, str], DeviceHistory]  # device_id -> DeviceHistory


+BUSY_POLLING_INTERVAL_S: Final[float] = 0.001
+BATCH_MESSAGE_INTERVAL_S: Final[float] = 0.1
+NORMAL_REFRESH_INTERVAL_S: Final[float] = 0.5
+QUEUE_BUFFER_SIZE: Final[int] = 32
+
 UDP_SERVER_HOST: Final[str] = "localhost"
 UDP_SERVER_PORT: Final[int] = 50_000
 MAX_LENGTH = 600
@@ -66,43 +82,150 @@ def unwrap(value: Optional[T]) -> T:
     return value


+def parse_ble_hr_measurement(data: bytes) -> Optional[int]:
+    """
+    Parse BLE Heart Rate Measurement characteristic data according to Bluetooth specification.
+
+    Args:
+        data: Raw bytes from the heart rate measurement characteristic
+
+    Returns:
+        Heart rate value in BPM, or None if parsing fails
+    Note:
+        ```cpp
+        /**
+         * @brief Structure of the Heart Rate Measurement characteristic
+         *
+         * @see https://www.bluetooth.com/specifications/gss/
+         * @see section 3.116 Heart Rate Measurement of the document: GATT Specification Supplement.
+         */
+        struct ble_hr_measurement_flag_t {
+            // LSB first
+
+            /*
+             * 0: uint8_t
+             * 1: uint16_t
+             */
+            bool heart_rate_value_format : 1;
+            bool sensor_contact_detected : 1;
+            bool sensor_contact_supported : 1;
+            bool energy_expended_present : 1;
+            bool rr_interval_present : 1;
+            uint8_t reserved : 3;
+        };
+        ```
+    """
+    if len(data) < 2:
+        return None
+
+    # First byte contains flags
+    flags = data[0]
+
+    # Bit 0: Heart Rate Value Format (0 = uint8, 1 = uint16)
+    heart_rate_value_format = (flags & 0x01) != 0
+
+    try:
+        if heart_rate_value_format:
+            # 16-bit heart rate value (little endian)
+            if len(data) < 3:
+                return None
+            hr_value = int.from_bytes(data[1:3], byteorder="little")
+        else:
+            # 8-bit heart rate value
+            hr_value = data[1]
+
+        return hr_value
+    except (IndexError, ValueError):
+        return None
+
+
+def hr_confidence_to_percentage(confidence: HrConfidence) -> float:
+    """Convert HrConfidence enum to percentage value"""
+    if confidence == HrConfidence.ZERO:
+        return 0  # mid-point of [0,25)
+    elif confidence == HrConfidence.LOW:
+        return 37.5  # mid-point of [25,50)
+    elif confidence == HrConfidence.MEDIUM:
+        return 62.5  # mid-point of [50,75)
+    elif confidence == HrConfidence.HIGH:
+        return 100  # mid-point of (75,100]
+    else:
+        raise ValueError(f"Invalid HrConfidence: {confidence}")
+
+
+def create_device_history() -> DeviceHistory:
+    """Create a new device history structure"""
+    return {
+        "timescape": deque(maxlen=MAX_LENGTH),
+        "hr_data": deque(maxlen=MAX_LENGTH),
+    }
+
+
+def get_device_name(device_id: Union[int, str]) -> str:
+    """Get display name for device"""
+    if device_id == UDP_DEVICE_ID:
+        return "UDP Device"
+    return f"Device {device_id}"
+
+
 @st.cache_resource
 def resource(params: Any = None):
     set_ev = anyio.Event()
-    tx, rx = create_memory_object_stream[bytes]()
+    tx, rx = create_memory_object_stream[Tuple[datetime, bytes]](
+        max_buffer_size=QUEUE_BUFFER_SIZE
+    )
+    mqtt_tx, mqtt_rx = create_memory_object_stream[Tuple[datetime, MqttMessage]](
+        max_buffer_size=QUEUE_BUFFER_SIZE
+    )
     tg: Optional[TaskGroup] = None

-    async def poll_task():
-        nonlocal set_ev
-        nonlocal tg
-        tg = anyio.create_task_group()
-        set_ev.set()
-        async with tg:
+    async def udp_task():
         async with await create_udp_socket(
             local_host=UDP_SERVER_HOST, local_port=UDP_SERVER_PORT, reuse_port=True
         ) as udp:
+            logger.info(
+                "UDP server listening on {}:{}", UDP_SERVER_HOST, UDP_SERVER_PORT
+            )
             async for packet, _ in udp:
-                await tx.send(packet)
+                timestamp = datetime.now()
+                await tx.send((timestamp, packet))

-    tr = Thread(target=anyio.run, args=(poll_task,))
+    async def mqtt_task():
+        async with MqttClient(MQTT_BROKER, port=MQTT_BROKER_PORT) as client:
+            await client.subscribe(TOPIC)
+            logger.info(
+                "Subscribed to MQTT broker {}:{} topic {}",
+                MQTT_BROKER,
+                MQTT_BROKER_PORT,
+                TOPIC,
+            )
+            async for message in client.messages:
+                timestamp = datetime.now()
+                await mqtt_tx.send((timestamp, message))
+
+    async def combined_task():
+        nonlocal set_ev, tg
+        tg = anyio.create_task_group()
+        set_ev.set()
+        async with tg:
+            async with anyio.create_task_group() as inner_tg:
+                inner_tg.start_soon(udp_task)
+                inner_tg.start_soon(mqtt_task)
+
+    tr = Thread(target=anyio.run, args=(combined_task,))
     tr.start()
+
     while not set_ev.is_set():
-        sleep(0.01)
-    logger.info("Poll task initialized")
+        sleep(BUSY_POLLING_INTERVAL_S)
+
+    logger.info("UDP and MQTT tasks initialized in single thread")
+
     state: AppState = {
         "worker_thread": tr,
         "message_queue": rx,
+        "mqtt_message_queue": mqtt_rx,
         "task_group": unwrap(tg),
-        "history": {
-            "timescape": deque(maxlen=MAX_LENGTH),
-            "hr_data": deque(maxlen=MAX_LENGTH),
-            "hr_conf": deque(maxlen=MAX_LENGTH),
-            "accel_x_data": deque(maxlen=MAX_LENGTH),
-            "accel_y_data": deque(maxlen=MAX_LENGTH),
-            "accel_z_data": deque(maxlen=MAX_LENGTH),
-            "pd_data": deque(maxlen=MAX_LENGTH),
-        },
-        "refresh_inst": Instant(),
+        "device_histories": {},
     }
     logger.info("Resource created")
     return state
@@ -110,26 +233,27 @@ def resource(params: Any = None):


 def main():
     state = resource()
-    history = state["history"]
+    device_histories = state["device_histories"]

     def on_export():
         file_name = f"history_{datetime.now().strftime('%Y%m%d_%H%M%S')}.parquet"
         logger.info(f"Exporting to {file_name}")
-        rec = ak.Record(history)
+        # Export all device histories
+        export_data = {
+            device_id: ak.Record(dev_hist)
+            for device_id, dev_hist in device_histories.items()
+        }
+        rec = ak.Record(export_data)
         ak.to_parquet(rec, file_name)

     def on_clear():
-        nonlocal history
+        nonlocal device_histories
         logger.info("Clearing history")
-        history["timescape"].clear()
-        history["hr_data"].clear()
-        history["hr_conf"].clear()
-        history["accel_x_data"].clear()
-        history["accel_y_data"].clear()
-        history["accel_z_data"].clear()
+        device_histories.clear()

     # https://docs.streamlit.io/develop/api-reference/layout
-    st.title("MAX-BAND Visualizer")
+    st.title("HR Visualizer")
     with st.container(border=True):
         c1, c2 = st.columns(2)
         with c1:
@@ -145,56 +269,133 @@ def main():
                 on_click=on_clear,
             )

-    placeholder = st.empty()
-    md_placeholder = st.empty()
-    while True:
-        try:
-            message = state["message_queue"].receive_nowait()
-        except anyio.WouldBlock:
-            continue
-        try:
-            packet = HrPacket.unmarshal(message)
-        except ValueError as e:
-            logger.error(f"bad packet: {e}")
-            continue
-
-        with placeholder.container():
-            history["hr_data"].append(packet.hr)
-            history["hr_conf"].append(hr_confidence_to_num(packet.status.hr_confidence))
-            history["pd_data"].extend(packet.raw_data)
-            fig_hr, fig_pd = st.tabs(["Heart Rate", "PD"])
-            with fig_hr:
-                st.plotly_chart(
-                    go.Figure(
-                        data=[
-                            go.Scatter(
-                                y=list(history["hr_data"]),
-                                mode="lines",
-                                name="HR",
-                            ),
-                            go.Scatter(
-                                y=list(history["hr_conf"]),
-                                mode="lines",
-                                name="HR Confidence",
-                            ),
-                        ]
-                    )
-                )
-            with fig_pd:
-                st.plotly_chart(
-                    go.Figure(
-                        data=[
-                            go.Scatter(
-                                y=list(history["pd_data"]),
-                                mode="lines",
-                                name="PD",
-                            )
-                        ]
-                    )
-                )
+    # Device selection
+    if device_histories:
+        selected_devices = st.multiselect(
+            "Select devices to display:",
+            options=list(device_histories.keys()),
+            default=list(device_histories.keys()),
+            format_func=get_device_name,
+        )
+    else:
+        selected_devices = []
+
+    # Process UDP messages (treat as device_id = 0)
+    try:
+        while True:
+            timestamp, packet = state["message_queue"].receive_nowait()
+            hr_value = parse_ble_hr_measurement(packet)
+            if hr_value is not None:
+                now = timestamp
+
+                if UDP_DEVICE_ID not in device_histories:
+                    device_histories[UDP_DEVICE_ID] = create_device_history()
+
+                dev_hist = device_histories[UDP_DEVICE_ID]
+                dev_hist["timescape"].append(now)
+                dev_hist["hr_data"].append(float(hr_value))
+
+                logger.debug("UDP Device: HR={}", hr_value)
+    except anyio.WouldBlock:
+        pass
+
+    # Process MQTT messages
+    try:
+        while True:
+            timestamp, mqtt_message = state["mqtt_message_queue"].receive_nowait()
+            if mqtt_message.payload:
+                try:
+                    payload_bytes = mqtt_message.payload
+                    if isinstance(payload_bytes, str):
+                        payload_bytes = payload_bytes.encode("utf-8")
+                    elif not isinstance(payload_bytes, bytes):
+                        continue
+
+                    hr_packet = HrPacket()
+                    hr_packet.parse(payload_bytes)
+
+                    now = timestamp
+
+                    device_id = None
+                    hr_value = None
+
+                    if hr_packet.hr_only_packet:
+                        packet = hr_packet.hr_only_packet
+                        device_id = packet.id
+                        hr_value = packet.hr
+                    elif hr_packet.hr_ppg_packet:
+                        packet = hr_packet.hr_ppg_packet
+                        device_id = packet.id
+                        hr_value = packet.hr
+
+                    if device_id is not None and hr_value is not None:
+                        if device_id not in device_histories:
+                            device_histories[device_id] = create_device_history()
+
+                        dev_hist = device_histories[device_id]
+                        dev_hist["timescape"].append(now)
+                        dev_hist["hr_data"].append(float(hr_value))
+
+                        logger.debug("Device {}: HR={}", device_id, hr_value)
+                except (ValueError, TypeError, IndexError, UnicodeDecodeError) as e:
+                    logger.error("MQTT protobuf message parsing: {}", e)
+    except anyio.WouldBlock:
+        pass
+
+    # Update visualization - HR Graphs
+    if device_histories:
+        st.subheader("Heart Rate Data")
+
+        # Create plots for selected devices
+        traces = []
+        colors = [
+            "red",
+            "green",
+            "blue",
+            "orange",
+            "purple",
+            "brown",
+            "pink",
+            "gray",
+            "olive",
+            "cyan",
+        ]
+        for i, device_id in enumerate(selected_devices):
+            if device_id in device_histories:
+                dev_hist = device_histories[device_id]
+                if dev_hist["hr_data"] and dev_hist["timescape"]:
+                    color = colors[i % len(colors)]
+                    traces.append(
+                        go.Scatter(
+                            x=list(dev_hist["timescape"]),
+                            y=list(dev_hist["hr_data"]),
+                            mode="lines+markers",
+                            name=get_device_name(device_id),
+                            line=dict(color=color),
+                            marker=dict(size=4),
+                        )
+                    )
+
+        if traces:
+            fig = go.Figure(data=traces)
+            fig.update_layout(
+                title="Heart Rate Monitor",
+                xaxis_title="Time",
+                yaxis_title="Heart Rate (BPM)",
+                hovermode="x unified",
+                showlegend=True,
+                height=500,
+            )
+            st.plotly_chart(fig, use_container_width=True)
+        else:
+            st.info("No heart rate data available for selected devices")
+    else:
+        st.info("No devices connected yet. Waiting for data...")
+
+    sleep(NORMAL_REFRESH_INTERVAL_S)
+    st.rerun()


 if __name__ == "__main__":
     main()
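A few hand-made payloads are enough to sanity-check the `parse_ble_hr_measurement` helper added above (flag bit 0 selects an 8-bit value or a little-endian 16-bit value; the payloads below are invented):

```python
# Invented payloads: flags byte first, then the HR value.
assert parse_ble_hr_measurement(bytes([0x00, 72])) == 72           # 8-bit format
assert parse_ble_hr_measurement(bytes([0x01, 0x2C, 0x01])) == 300  # 16-bit LE: 0x012C
assert parse_ble_hr_measurement(b"\x00") is None                   # too short
```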
234
main_mqtt.py
Normal file
234
main_mqtt.py
Normal file
@ -0,0 +1,234 @@
|
|||||||
|
from typing import (
    Any,
    Final,
    List,
    Optional,
    TypeVar,
    TypedDict,
    cast,
)

from loguru import logger
import orjson
import plotly.graph_objects as go
import streamlit as st
import anyio
from anyio.abc import TaskGroup
from anyio import create_memory_object_stream
from anyio.streams.memory import MemoryObjectReceiveStream
from aiomqtt import Client as MqttClient, Message as MqttMessage
from threading import Thread
from time import sleep
from pydantic import BaseModel
from datetime import datetime
import awkward as ak
from awkward import Array as AwkwardArray, Record as AwkwardRecord


# https://handmadesoftware.medium.com/streamlit-asyncio-and-mongodb-f85f77aea825
# State shared between the Streamlit script thread and the MQTT worker thread.
class AppState(TypedDict):
    worker_thread: Thread
    client: MqttClient
    message_queue: MemoryObjectReceiveStream[MqttMessage]
    task_group: TaskGroup
    history: dict[str, AwkwardArray]


MQTT_BROKER: Final[str] = "192.168.2.189"
MQTT_BROKER_PORT: Final[int] = 1883
MAX_LENGTH = 600
TOPIC: Final[str] = "GwData"

T = TypeVar("T")


def unwrap(value: Optional[T]) -> T:
    if value is None:
        raise ValueError("Value is None")
    return value


@st.cache_resource
def resource(params: Any = None):
    client: Optional[MqttClient] = None
    tx, rx = create_memory_object_stream[MqttMessage]()
    tg: Optional[TaskGroup] = None

    async def main():
        nonlocal tg
        nonlocal client
        tg = anyio.create_task_group()
        async with tg:
            client = MqttClient(MQTT_BROKER, port=MQTT_BROKER_PORT)
            async with client:
                await client.subscribe(TOPIC)
                logger.info(
                    "Subscribed {}:{} to topic {}", MQTT_BROKER, MQTT_BROKER_PORT, TOPIC
                )
                # https://aiomqtt.bo3hm.com/subscribing-to-a-topic.html
                async for message in client.messages:
                    await tx.send(message)

    tr = Thread(target=anyio.run, args=(main,))
    tr.start()
    # give the worker thread a moment to create the client and task group
    sleep(0.1)
    state: AppState = {
        "worker_thread": tr,
        "client": unwrap(client),
        "message_queue": rx,
        "task_group": unwrap(tg),
        "history": {},
    }
    return state
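
# illustration only (not part of the commit): st.cache_resource memoizes by
# argument, so every Streamlit rerun receives the same AppState back and the
# MQTT worker thread is started once per session, not once per rerun:
#
#   state_a = resource()
#   state_b = resource()
#   assert state_a is state_b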


class GwMessage(TypedDict):
    v: int
    mid: int
    time: int
    ip: str
    mac: str
    devices: list[Any]
    rssi: int


class DeviceMessage(BaseModel):
    mac: str
    """
    Hex string, capital letters, e.g. "D6AF1CA9C491"
    """
    service: str
    """
    Hex string, capital letters, e.g. "180D"
    """
    characteristic: str
    """
    Hex string, capital letters, e.g. "2A37"
    """
    value: str
    """
    Hex string, capital letters, e.g. "0056"
    """
    rssi: int

    @property
    def value_bytes(self) -> bytes:
        return bytes.fromhex(self.value)


def get_device_data(message: GwMessage) -> List[DeviceMessage]:
    """
    Parse the ``devices`` array, e.g.

    [[5,"D6AF1CA9C491","180D","2A37","0056",-58],[5,"A09E1AE4E710","180D","2A37","0055",-50]]

    fields: unknown, MAC address, service, characteristic, value (hex), RSSI
    """
    l: list[DeviceMessage] = []
    for d in message["devices"]:
        x, mac, service, characteristic, value, rssi = d
        l.append(
            DeviceMessage(
                mac=mac,
                service=service,
                characteristic=characteristic,
                value=value,
                rssi=rssi,
            )
        )
    return l
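
# illustration only (not part of the commit): feeding the first sample row
# from the docstring through the parser; every field other than "devices"
# is a placeholder value
_sample: GwMessage = {
    "v": 1,
    "mid": 0,
    "time": 0,
    "ip": "0.0.0.0",
    "mac": "000000000000",
    "devices": [[5, "D6AF1CA9C491", "180D", "2A37", "0056", -58]],
    "rssi": -40,
}
assert get_device_data(_sample)[0].mac == "D6AF1CA9C491"
assert get_device_data(_sample)[0].value_bytes == b"\x00\x56"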


def payload_to_hr(payload: bytes) -> int:
    """
    The first byte is a flags field; the heart rate value follows it.

    Bit 0 (Heart Rate Format)
        0: Heart rate value is 8 bits
        1: Heart rate value is 16 bits
    Bit 3 (Energy Expended)
        Indicates whether energy expended data is present
    Bit 4 (RR Interval)
        Indicates whether RR interval data is present
    """
    flags = payload[0]
    if flags & 0b00000001:
        # multi-byte fields in GATT characteristics are little-endian
        return int.from_bytes(payload[1:3], "little")
    else:
        return payload[1]
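
# illustration only (not part of the commit): the sample value "0056" has
# flags 0x00, so the heart rate is the single byte 0x56 == 86 bpm; the
# second payload is a hypothetical 16-bit reading (flags bit 0 set)
assert payload_to_hr(bytes.fromhex("0056")) == 86
assert payload_to_hr(bytes.fromhex("015800")) == 88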


def main():
    state = resource()
    logger.info("Resource created")
    history = state["history"]

    def push_new_message(message: GwMessage):
        dms = get_device_data(message)
        now = datetime.now()
        for dm in dms:
            rec = AwkwardRecord(
                {
                    "time": now,
                    "value": payload_to_hr(dm.value_bytes),
                    "rssi": dm.rssi,
                }
            )
            if dm.mac not in history:
                history[dm.mac] = AwkwardArray([rec])
            else:
                history[dm.mac] = ak.concatenate([history[dm.mac], [rec]])
            # keep at most MAX_LENGTH samples per device
            if len(history[dm.mac]) > MAX_LENGTH:
                history[dm.mac] = AwkwardArray(history[dm.mac][-MAX_LENGTH:])

    def on_export():
        now = datetime.now()
        filename = f"export-{now.strftime('%Y-%m-%d-%H-%M-%S')}.parquet"
        ak.to_parquet([history], filename)
        logger.info("Export to {}", filename)

    def on_clear():
        history.clear()
        logger.info("History cleared")

    st.button(
        "Export", help="Export the current data to a parquet file", on_click=on_export
    )
    st.button("Clear", help="Clear the current data", on_click=on_clear)
    pannel = st.empty()
    while True:
        try:
            message = state["message_queue"].receive_nowait()
        except anyio.WouldBlock:
            # nothing queued yet; yield briefly instead of busy-spinning
            sleep(0.05)
            continue
        m: str
        if isinstance(message.payload, str):
            m = message.payload
        elif isinstance(message.payload, bytes):
            m = message.payload.decode("utf-8")
        else:
            logger.warning("Unknown message type: {}", type(message.payload))
            continue
        d = cast(GwMessage, orjson.loads(m))
        push_new_message(d)

        def to_scatter(key: str, dev_history: AwkwardArray):
            x = ak.to_numpy(dev_history["time"])
            y = ak.to_numpy(dev_history["value"])
            return go.Scatter(x=x, y=y, mode="lines+markers", name=key)

        scatters = [to_scatter(k, el) for k, el in history.items()]
        fig = go.Figure(scatters)
        pannel.plotly_chart(fig)


if __name__ == "__main__":
    main()


# 1659A202
24
pyproject.toml
Normal file
@@ -0,0 +1,24 @@
[project]
name = "max_visualizer"
version = "0.1.0"
description = "python scripts for testing embedded system"
readme = "README.md"
requires-python = ">=3.12"
dependencies = [
    "aiomqtt>=2.4.0",
    "anyio>=4.9.0",
    "awkward>=2.8.3",
    "betterproto[compiler]>=2.0.0b7",
    "bleak>=0.22.3",
    "grpcio-tools>=1.71.0",
    "loguru>=0.7.3",
    "matplotlib>=3.10.3",
    "orjson>=3.10",  # imported by main_mqtt.py
    "paho-mqtt>=2.1.0",
    "pandas>=2.2",  # imported by visualize_hr_data.py
    "plotly>=6.1.2",
    "pydantic>=2.11.5",
    "seaborn>=0.13.2",
    "streamlit>=1.45.1",
]

[project.scripts]
main = "main:main"
216
visualize_hr_data.py
Normal file
@@ -0,0 +1,216 @@
#!/usr/bin/env python3
"""
Visualize HR data from two CSV files with max value alignment
"""

import pandas as pd
import matplotlib.pyplot as plt


def load_and_visualize_hr_data():
    """Load and visualize HR data from both CSV files with max value alignment"""

    # Load the first CSV file (with timestamps)
    print("Loading 2025-06-10_16_26.CSV...")
    df1 = pd.read_csv("2025-06-10_16_26.CSV")
    print(f"Dataset 1 shape: {df1.shape}")
    print(f"Dataset 1 columns: {df1.columns.tolist()}")
    print(f"Dataset 1 HR range: {df1['hr'].min():.1f} - {df1['hr'].max():.1f}")

    # Load the second CSV file (HR only)
    print("\nLoading history_20250610_165414_HR.csv...")
    df2 = pd.read_csv("history_20250610_165414_HR.csv")
    print(f"Dataset 2 shape: {df2.shape}")
    print(f"Dataset 2 columns: {df2.columns.tolist()}")
    print(f"Dataset 2 HR range: {df2['HR'].min():.1f} - {df2['HR'].max():.1f}")

    # Find the maximum values and their indices
    max_hr1 = df1["hr"].max()
    max_idx1 = df1["hr"].idxmax()
    max_hr2 = df2["HR"].max()
    max_idx2 = df2["HR"].idxmax()

    print(f"\nDataset 1 max HR: {max_hr1:.1f} at index {max_idx1}")
    print(f"Dataset 2 max HR: {max_hr2:.1f} at index {max_idx2}")

    # Create time indices for both datasets
    df1["time_seconds"] = df1.index  # Use index as time in seconds
    df2["time_seconds"] = df2.index  # Use index as time in seconds

    # Align datasets by shifting so max values occur at the same time
    # We'll align both to time = 0 at their respective max points
    df1["time_aligned"] = df1["time_seconds"] - max_idx1
    df2["time_aligned"] = df2["time_seconds"] - max_idx2

    print("\nAfter alignment:")
    print(
        f"Dataset 1 time range: {df1['time_aligned'].min():.1f} to {df1['time_aligned'].max():.1f} seconds"
    )
    print(
        f"Dataset 2 time range: {df2['time_aligned'].min():.1f} to {df2['time_aligned'].max():.1f} seconds"
    )

    # Create the visualization
    plt.style.use("seaborn-v0_8")
    fig, axes = plt.subplots(2, 2, figsize=(15, 10))
    fig.suptitle(
        "Heart Rate Data Visualization (Max Values Aligned)",
        fontsize=16,
        fontweight="bold",
    )

    # Plot 1: Time series of both datasets (aligned)
    axes[0, 0].plot(
        df1["time_aligned"],
        df1["hr"],
        "o-",
        alpha=0.7,
        markersize=3,
        label=f"Dataset 1 (max: {max_hr1:.1f})",
        color="blue",
    )
    axes[0, 0].plot(
        df2["time_aligned"],
        df2["HR"],
        "s-",
        alpha=0.7,
        markersize=3,
        label=f"Dataset 2 (max: {max_hr2:.1f})",
        color="red",
    )

    # Highlight the max values
    axes[0, 0].axvline(
        x=0, color="black", linestyle="--", alpha=0.5, label="Max values aligned"
    )
    axes[0, 0].scatter([0], [max_hr1], color="blue", s=100, zorder=5, marker="*")
    axes[0, 0].scatter([0], [max_hr2], color="red", s=100, zorder=5, marker="*")

    axes[0, 0].set_xlabel("Time (seconds, aligned to max)")
    axes[0, 0].set_ylabel("Heart Rate (bpm)")
    axes[0, 0].set_title("Heart Rate Over Time (Aligned at Max Values)")
    axes[0, 0].legend()
    axes[0, 0].grid(True, alpha=0.3)

    # Plot 2: Distribution comparison
    axes[0, 1].hist(
        df1["hr"], bins=20, alpha=0.7, label="Dataset 1", color="blue", density=True
    )
    axes[0, 1].hist(
        df2["HR"], bins=20, alpha=0.7, label="Dataset 2", color="red", density=True
    )
    axes[0, 1].axvline(
        max_hr1, color="blue", linestyle="--", alpha=0.8, label=f"Max 1: {max_hr1:.1f}"
    )
    axes[0, 1].axvline(
        max_hr2, color="red", linestyle="--", alpha=0.8, label=f"Max 2: {max_hr2:.1f}"
    )
    axes[0, 1].set_xlabel("Heart Rate (bpm)")
    axes[0, 1].set_ylabel("Density")
    axes[0, 1].set_title("Heart Rate Distribution")
    axes[0, 1].legend()
    axes[0, 1].grid(True, alpha=0.3)

    # Plot 3: Box plot comparison
    data_for_box = [df1["hr"], df2["HR"]]
    labels = ["Dataset 1\n(2025-06-10_16_26)", "Dataset 2\n(history_20250610_165414)"]
    bp = axes[1, 0].boxplot(data_for_box, tick_labels=labels, patch_artist=True)
    bp["boxes"][0].set_facecolor("lightblue")
    bp["boxes"][1].set_facecolor("lightcoral")
    axes[1, 0].set_ylabel("Heart Rate (bpm)")
    axes[1, 0].set_title("Heart Rate Distribution Comparison")
    axes[1, 0].grid(True, alpha=0.3)

    # Plot 4: Overlay plot around max values (zoomed in view)
    # Show ±20 seconds around the aligned max values
    window = 20
    df1_window = df1[(df1["time_aligned"] >= -window) & (df1["time_aligned"] <= window)]
    df2_window = df2[(df2["time_aligned"] >= -window) & (df2["time_aligned"] <= window)]

    if len(df1_window) > 0:
        axes[1, 1].plot(
            df1_window["time_aligned"],
            df1_window["hr"],
            "o-",
            alpha=0.8,
            markersize=4,
            label="Dataset 1",
            color="blue",
            linewidth=2,
        )
    if len(df2_window) > 0:
        axes[1, 1].plot(
            df2_window["time_aligned"],
            df2_window["HR"],
            "s-",
            alpha=0.8,
            markersize=4,
            label="Dataset 2",
            color="red",
            linewidth=2,
        )

    axes[1, 1].axvline(
        x=0, color="black", linestyle="--", alpha=0.5, label="Max alignment"
    )
    axes[1, 1].scatter(
        [0], [max_hr1], color="blue", s=150, zorder=5, marker="*", edgecolor="black"
    )
    axes[1, 1].scatter(
        [0], [max_hr2], color="red", s=150, zorder=5, marker="*", edgecolor="black"
    )

    axes[1, 1].set_xlabel("Time (seconds, aligned to max)")
    axes[1, 1].set_ylabel("Heart Rate (bpm)")
    axes[1, 1].set_title(f"Zoomed View Around Max Values (±{window}s)")
    axes[1, 1].legend()
    axes[1, 1].grid(True, alpha=0.3)

    plt.tight_layout()

    # Save the plot
    output_file = "hr_data_visualization_aligned.png"
    plt.savefig(output_file, dpi=300, bbox_inches="tight")
    print(f"\nVisualization saved as: {output_file}")

    # Show the plot
    plt.show()

    # Print detailed statistics
    print("\n" + "=" * 60)
    print("DETAILED STATISTICS (MAX VALUES ALIGNED)")
    print("=" * 60)
    print("\nDataset 1 (2025-06-10_16_26.CSV):")
    print(f"  Records: {len(df1)}")
    print(f"  Mean HR: {df1['hr'].mean():.1f} bpm")
    print(f"  Median HR: {df1['hr'].median():.1f} bpm")
    print(f"  Max HR: {max_hr1:.1f} bpm (at original index {max_idx1})")
    print(f"  Std Dev: {df1['hr'].std():.1f} bpm")
    print(f"  Range: {df1['hr'].min():.1f} - {df1['hr'].max():.1f} bpm")

    print("\nDataset 2 (history_20250610_165414_HR.csv):")
    print(f"  Records: {len(df2)}")
    print(f"  Mean HR: {df2['HR'].mean():.1f} bpm")
    print(f"  Median HR: {df2['HR'].median():.1f} bpm")
    print(f"  Max HR: {max_hr2:.1f} bpm (at original index {max_idx2})")
    print(f"  Std Dev: {df2['HR'].std():.1f} bpm")
    print(f"  Range: {df2['HR'].min():.1f} - {df2['HR'].max():.1f} bpm")

    print("\nAlignment Info:")
    print(f"  Max HR difference: {abs(max_hr1 - max_hr2):.1f} bpm")
    print(
        f"  Time shift applied: Dataset 1 shifted by -{max_idx1}s, Dataset 2 shifted by -{max_idx2}s"
    )

    print("\n" + "=" * 60)

    return df1, df2


if __name__ == "__main__":
    print("Starting HR data visualization with max value alignment...")
    df1, df2 = load_and_visualize_hr_data()
    print("Visualization complete!")