diff --git a/.github/workflows/merge.yml b/.github/workflows/merge.yml index 8a2853497..a3f69cecf 100644 --- a/.github/workflows/merge.yml +++ b/.github/workflows/merge.yml @@ -5,7 +5,7 @@ name: Latest release env: CACHE_VERSION: 21 - DEFAULT_PYTHON: "3.12" + DEFAULT_PYTHON: "3.13" # Only run on merges on: diff --git a/.github/workflows/verify.yml b/.github/workflows/verify.yml index 8e4a6ad49..d7a847135 100644 --- a/.github/workflows/verify.yml +++ b/.github/workflows/verify.yml @@ -4,8 +4,8 @@ name: Latest commit env: - CACHE_VERSION: 22 - DEFAULT_PYTHON: "3.12" + CACHE_VERSION: 7 + DEFAULT_PYTHON: "3.13" PRE_COMMIT_HOME: ~/.cache/pre-commit on: @@ -22,7 +22,7 @@ jobs: name: Prepare steps: - name: Check out committed code - uses: actions/checkout@v4 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python uses: actions/setup-python@v5 @@ -48,9 +48,8 @@ jobs: pip install virtualenv --upgrade python -m venv venv . venv/bin/activate - pip install uv - uv pip install -U pip setuptools wheel - uv pip install -r requirements_test.txt -r requirements_commit.txt + pip install -U pip setuptools wheel + pip install -r requirements_test.txt -r requirements_commit.txt - name: Restore pre-commit environment from cache id: cache-precommit uses: actions/cache@v4 @@ -72,7 +71,7 @@ jobs: needs: prepare steps: - name: Check out committed code - uses: actions/checkout@v4 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 with: persist-credentials: false - name: Set up Python ${{ env.DEFAULT_PYTHON }} @@ -98,7 +97,7 @@ jobs: - name: Ruff (check) run: | . venv/bin/activate - #ruff check plugwise_usb/*py tests/*py + #ruff plugwise_usb/*py tests/*py echo "***" echo "***" echo "Code is not up to par for ruff, skipping" @@ -108,7 +107,7 @@ jobs: if: failure() run: | . venv/bin/activate - ruff check --fix plugwise_usb/*py tests/*py + ruff --fix plugwise_usb/*py tests/*py git config --global user.name 'autoruff' git config --global user.email 'plugwise_usb@users.noreply.github.com' git remote set-url origin https://x-access-token:${{ secrets.PAT_CT }}@github.com/$GITHUB_REPOSITORY @@ -125,7 +124,7 @@ jobs: - dependencies_check steps: - name: Check out committed code - uses: actions/checkout@v4 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python uses: actions/setup-python@v5 @@ -173,10 +172,10 @@ jobs: needs: commitcheck strategy: matrix: - python-version: ["3.12", "3.11", "3.10"] + python-version: ["3.12", "3.11"] steps: - name: Check out committed code - uses: actions/checkout@v4 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 - name: Set up Python ${{ matrix.python-version }} id: python uses: actions/setup-python@v5 @@ -200,12 +199,11 @@ jobs: run: | python -m venv venv . 
venv/bin/activate - pip install uv - uv pip install -U pip setuptools wheel - # uv pip install -r requirements_test.txt + pip install -U pip setuptools wheel + #pip install -r requirements_test.txt # 20220124 Mimic setup_test.sh - uv pip install --upgrade -r requirements_test.txt -c https://raw.githubusercontent.com/home-assistant/core/dev/homeassistant/package_constraints.txt -r https://raw.githubusercontent.com/home-assistant/core/dev/requirements_test.txt -r https://raw.githubusercontent.com/home-assistant/core/dev/requirements_test_pre_commit.txt - uv pip install --upgrade pytest-asyncio + pip install --upgrade -r requirements_test.txt -c https://raw.githubusercontent.com/home-assistant/core/dev/homeassistant/package_constraints.txt -r https://raw.githubusercontent.com/home-assistant/core/dev/requirements_test.txt -r https://raw.githubusercontent.com/home-assistant/core/dev/requirements_test_pre_commit.txt + pip install --upgrade pytest-asyncio pytest: runs-on: ubuntu-latest @@ -213,11 +211,11 @@ jobs: needs: prepare-test-cache strategy: matrix: - python-version: ["3.12", "3.11", "3.10"] + python-version: ["3.12", "3.11"] steps: - name: Check out committed code - uses: actions/checkout@v4 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 - name: Set up Python ${{ matrix.python-version }} id: python uses: actions/setup-python@v5 @@ -255,7 +253,7 @@ jobs: needs: pytest steps: - name: Check out committed code - uses: actions/checkout@v4 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 with: persist-credentials: false - name: Set up Python ${{ env.DEFAULT_PYTHON }} @@ -295,7 +293,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Check out committed code - uses: actions/checkout@v4 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 - name: Run ShellCheck uses: ludeeus/action-shellcheck@master @@ -305,7 +303,7 @@ jobs: name: Dependency steps: - name: Check out committed code - uses: actions/checkout@v4 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 - name: Run dependency checker run: scripts/dependencies_check.sh debug @@ -315,7 +313,7 @@ jobs: needs: pytest steps: - name: Check out committed code - uses: actions/checkout@v4 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python uses: actions/setup-python@v5 @@ -360,7 +358,7 @@ jobs: needs: [coverage, mypy] steps: - name: Check out committed code - uses: actions/checkout@v4 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python uses: actions/setup-python@v5 @@ -403,7 +401,7 @@ jobs: needs: coverage steps: - name: Check out committed code - uses: actions/checkout@v4 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 - name: Set up Python ${{ env.DEFAULT_PYTHON }} id: python uses: actions/setup-python@v5 diff --git a/.gitignore b/.gitignore index a45429225..11dac2da0 100644 --- a/.gitignore +++ b/.gitignore @@ -11,6 +11,7 @@ tests/__pycache__ .coverage .vscode venv +.venv fixtures/* !fixtures/.keep *.sedbck diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index bbec947dc..40975393e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -7,7 +7,7 @@ ci: default_language_version: # force all unspecified python hooks to run python3 - python: python3.12 + python: python3.13 repos: # Run manually in CI skipping the branch checks @@ -28,18 +28,18 @@ repos: args: - 
--branch=main - repo: https://github.com/asottile/pyupgrade - rev: v3.19.0 + rev: v3.19.1 hooks: - id: pyupgrade args: [--py39-plus] # Moved codespell configuration to setup.cfg as per 'all-files' issues not reading args - repo: https://github.com/codespell-project/codespell - rev: v2.3.0 + rev: v2.4.1 hooks: - id: codespell exclude_types: [csv, json] - repo: https://github.com/PyCQA/bandit - rev: 1.8.0 + rev: 1.8.2 hooks: - id: bandit args: @@ -52,7 +52,7 @@ repos: hooks: - id: yamllint - repo: https://github.com/biomejs/pre-commit - rev: v0.5.0 + rev: v0.6.1 hooks: - id: biome-lint additional_dependencies: ["@biomejs/biome@1.8.3"] @@ -102,6 +102,6 @@ repos: language: script pass_filenames: false - repo: https://github.com/igorshubovych/markdownlint-cli - rev: v0.43.0 + rev: v0.44.0 hooks: - id: markdownlint diff --git a/CHANGELOG.md b/CHANGELOG.md index b3375a260..deb700187 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,10 +1,20 @@ # Changelog -## Ongoing +## v0.40.0 (a22) -- Ensure CI process remains operational -- Bumped pip to now prepend uv for using quicker dependency resolving and installing -- As for latest HA Core USB team should rework to python 3.12 (not still 3.10) +- Correcting messageflow to HA + +## v0.40.0 (a4) + +Full rewrite of library into async version. Main list of changes: + +- Full async and typed +- Improved protocol handling +- Support for local caching of collected data to improve startup and device detection +- Improved handling of edge cases especially for energy data collection +- Based on detected firmware version enable the supported features +- API details about supported data is combined into api.py +- Added tests ## v0.31.4(a0) @@ -12,7 +22,7 @@ ## v0.31.3 -- Bugfix midnight rollover for cicrles without power usage registered during first hour(s) +- Bugfix midnight rollover for circles without power usage registered during first hour(s) ## v0.31.2 diff --git a/plugwise_usb/__init__.py b/plugwise_usb/__init__.py index a4bbe45b3..a00ca68da 100644 --- a/plugwise_usb/__init__.py +++ b/plugwise_usb/__init__.py @@ -1,740 +1,364 @@ -"""Use of this source code is governed by the MIT license found in the LICENSE file. +"""Main stick object to control associated plugwise plugs. -Main stick object to control associated plugwise plugs +Use of this source code is governed by the MIT license found +in the LICENSE file. 
""" -from datetime import datetime, timedelta + +from __future__ import annotations + +from asyncio import get_running_loop +from collections.abc import Callable, Coroutine +from functools import wraps import logging -import sys -import threading -import time - -from .constants import ( - ACCEPT_JOIN_REQUESTS, - CB_JOIN_REQUEST, - CB_NEW_NODE, - MESSAGE_TIME_OUT, - NODE_TYPE_CELSIUS_NR, - NODE_TYPE_CELSIUS_SED, - NODE_TYPE_CIRCLE, - NODE_TYPE_CIRCLE_PLUS, - NODE_TYPE_SCAN, - NODE_TYPE_SENSE, - NODE_TYPE_STEALTH, - NODE_TYPE_SWITCH, - PRIORITY_LOW, - STATE_ACTIONS, - UTF8_DECODE, - WATCHDOG_DEAMON, -) -from .controller import StickMessageController -from .exceptions import ( - CirclePlusError, - NetworkDown, - PortError, - StickInitError, - TimeoutException, -) -from .messages.requests import ( - NodeAddRequest, - NodeAllowJoiningRequest, - NodeInfoRequest, - NodePingRequest, - NodeRemoveRequest, - StickInitRequest, -) -from .messages.responses import ( - NodeAckLargeResponse, - NodeAckResponse, - NodeInfoResponse, - NodeJoinAvailableResponse, - NodeRemoveResponse, - NodeResponse, - StickInitResponse, -) -from .nodes.circle import PlugwiseCircle -from .nodes.circle_plus import PlugwiseCirclePlus -from .nodes.scan import PlugwiseScan -from .nodes.sense import PlugwiseSense -from .nodes.stealth import PlugwiseStealth -from .util import validate_mac +from typing import Any, TypeVar, cast + +from .api import NodeEvent, PlugwiseNode, StickEvent +from .connection import StickController +from .exceptions import StickError, SubscriptionError +from .network import StickNetwork + +FuncT = TypeVar("FuncT", bound=Callable[..., Any]) + _LOGGER = logging.getLogger(__name__) +def raise_not_connected(func: FuncT) -> FuncT: + """Validate existence of an active connection to Stick. Raise StickError when there is no active connection.""" + @wraps(func) + def decorated(*args: Any, **kwargs: Any) -> Any: + if not args[0].is_connected: + raise StickError( + "Not connected to USB-Stick, connect to USB-stick first." + ) + return func(*args, **kwargs) + return cast(FuncT, decorated) + + +def raise_not_initialized(func: FuncT) -> FuncT: + """Validate if active connection is initialized. Raise StickError when not initialized.""" + @wraps(func) + def decorated(*args: Any, **kwargs: Any) -> Any: + if not args[0].is_initialized: + raise StickError( + "Connection to USB-Stick is not initialized, " + + "initialize USB-stick first." 
+ ) + return func(*args, **kwargs) + return cast(FuncT, decorated) + + class Stick: """Plugwise connection stick.""" - def __init__(self, port, callback=None): - self.circle_plus_mac = None - self.init_callback = None - self.msg_controller = None - self.scan_callback = None - - self._accept_join_requests = ACCEPT_JOIN_REQUESTS - self._auto_update_manually = False - self._auto_update_timer = 0 - self._circle_plus_discovered = False - self._circle_plus_retries = 0 - self._device_nodes = {} - self._joined_nodes = 0 - self._mac_stick = None - self._messages_for_undiscovered_nodes = [] - self._network_id = None - self._network_online = False - self._nodes_discovered = None - self._nodes_not_discovered = {} - self._nodes_off_line = 0 - self._nodes_to_discover = {} + def __init__( + self, port: str | None = None, cache_enabled: bool = True + ) -> None: + """Initialize Stick.""" + self._loop = get_running_loop() + self._loop.set_debug(True) + self._controller = StickController() + self._network: StickNetwork | None = None + self._cache_enabled = cache_enabled self._port = port - self._run_update_thread = False - self._run_watchdog = None - self._stick_callbacks = {} - self._stick_initialized = False - self._update_thread = None - self._watchdog_thread = None + self._cache_folder: str = "" + + @property + def cache_folder(self) -> str: + """Path to store cached data.""" + return self._cache_folder + + @cache_folder.setter + def cache_folder(self, cache_folder: str) -> None: + """Set path to store cached data.""" + if cache_folder == self._cache_folder: + return + if self._network is not None: + self._network.cache_folder = cache_folder + self._cache_folder = cache_folder + + @property + def cache_enabled(self) -> bool: + """Indicates if caching is active.""" + return self._cache_enabled + + @cache_enabled.setter + def cache_enabled(self, enable: bool = True) -> None: + """Enable or disable usage of cache.""" + if self._network is not None: + self._network.cache_enabled = enable + self._cache_enabled = enable + + @property + def nodes(self) -> dict[str, PlugwiseNode]: + """Dictionary with all discovered and supported plugwise devices with the MAC address as their key.""" + if self._network is None: + return {} + return self._network.nodes - if callback: - self.auto_initialize(callback) + @property + def is_connected(self) -> bool: + """Current connection state to USB-Stick.""" + return self._controller.is_connected + + @property + def is_initialized(self) -> bool: + """Current initialization state of USB-Stick connection.""" + return self._controller.is_initialized + + @property + def joined_nodes(self) -> int | None: + """Total number of nodes registered to Circle+ including Circle+ itself.""" + if ( + not self._controller.is_connected + or self._network is None + or self._network.registry is None + ): + return None + return len(self._network.registry) + 1 + + @property + def firmware(self) -> str: + """Firmware of USB-Stick.""" + return self._controller.firmware_stick @property - def devices(self) -> dict: - """All discovered and supported plugwise devices with the MAC address as their key""" - return self._device_nodes + def hardware(self) -> str: + """Hardware of USB-Stick.""" + return self._controller.hardware_stick @property - def joined_nodes(self) -> int: - """Return total number of nodes registered to Circle+ including Circle+ itself.""" - return self._joined_nodes + 1 + def mac_stick(self) -> str: + """MAC address of USB-Stick. 
Raises StickError is connection is missing.""" + return self._controller.mac_stick @property - def mac(self) -> str: - """Return the MAC address of the USB-Stick.""" - if self._mac_stick: - return self._mac_stick.decode(UTF8_DECODE) - return None + def mac_coordinator(self) -> str: + """MAC address of the network coordinator (Circle+). Raises StickError is connection is missing.""" + return self._controller.mac_coordinator + + @property + def name(self) -> str: + """Return name of Stick.""" + return self._controller.stick_name + + @property + def network_discovered(self) -> bool: + """Indicate if discovery of network is active. Raises StickError is connection is missing.""" + if self._network is None: + return False + return self._network.is_running @property def network_state(self) -> bool: - """Return the state of the Plugwise network.""" - return self._network_online + """Indicate state of the Plugwise network.""" + if not self._controller.is_connected: + return False + return self._controller.network_online @property def network_id(self) -> int: - """Return the id of the Plugwise network.""" - return self._network_id + """Network id of the Plugwise network. Raises StickError is connection is missing.""" + return self._controller.network_id @property - def port(self) -> str: + def port(self) -> str | None: """Return currently configured port to USB-Stick.""" return self._port @port.setter - def port(self, port: str): - """Set port to USB-Stick.""" - if self.msg_controller: - self.disconnect() - self._port = port - - def auto_initialize(self, callback=None): - """Automatic initialization of USB-stick and discovery of all registered nodes.""" - - def init_finished(): - if not self._network_online: - _LOGGER.Error("plugwise Zigbee network down") - else: - self.scan(callback) - - if not self.msg_controller: - self.msg_controller = StickMessageController( - self.port, self.message_processor, self.node_state_updates - ) - try: - self.msg_controller.connect_to_stick() - self.initialize_stick(init_finished) - except PortError as err: - _LOGGER.error("Failed to connect: '%s'", err) - except StickInitError as err: - _LOGGER.error("Failed to initialize USBstick: '%s'", err) - except NetworkDown: - _LOGGER.error("Failed to communicated: Plugwise Zigbee network") - except TimeoutException: - _LOGGER.error("Timeout exception while initializing USBstick") - except Exception as err: # pylint: disable=broad-except - _LOGGER.error("Unknown error : %s", err) - - def connect(self, callback=None): - """Startup message controller and connect to stick.""" - if not self.msg_controller: - self.msg_controller = StickMessageController( - self.port, self.message_processor, self.node_state_updates - ) - if self.msg_controller.connect_to_stick(callback): - # update daemon - self._run_update_thread = False - self._auto_update_timer = 0 - self._update_thread = threading.Thread( - None, self._update_loop, "update_thread", (), {} - ) - self._update_thread.daemon = True - - def initialize_stick(self, callback=None, timeout=MESSAGE_TIME_OUT): - """Initialize the USB-stick, start watchdog thread and raise an error if this fails.""" - if not self.msg_controller.connection.is_connected(): - raise StickInitError - _LOGGER.debug("Send init request to Plugwise Zigbee stick") - self.msg_controller.send(StickInitRequest(), callback) - time_counter = 0 - while not self._stick_initialized and (time_counter < timeout): - time_counter += 0.1 - time.sleep(0.1) - if not self._stick_initialized: - raise StickInitError - if not 
self._network_online: - raise NetworkDown - - def initialize_circle_plus(self, callback=None, timeout=MESSAGE_TIME_OUT): - """Initialize connection from USB-Stick to the Circle+/Stealth+ node and raise an error if this fails.""" + def port(self, port: str) -> None: + """Path to serial port of USB-Stick.""" if ( - not self.msg_controller.connection.is_connected() - or not self._stick_initialized - or not self.circle_plus_mac + self._controller.is_connected + and port != self._port ): - raise StickInitError - self.discover_node(self.circle_plus_mac, callback) - - time_counter = 0 - while not self._circle_plus_discovered and (time_counter < timeout): - time_counter += 0.1 - time.sleep(0.1) - if not self._circle_plus_discovered: - raise CirclePlusError - - def disconnect(self): - """Disconnect from stick and raise error if it fails""" - self._run_watchdog = False - self._run_update_thread = False - self._auto_update_timer = 0 - if self.msg_controller: - self.msg_controller.disconnect_from_stick() - self.msg_controller = None - - def subscribe_stick_callback(self, callback, callback_type): - """Subscribe callback to execute.""" - if callback_type not in self._stick_callbacks: - self._stick_callbacks[callback_type] = [] - self._stick_callbacks[callback_type].append(callback) - - def unsubscribe_stick_callback(self, callback, callback_type): - """Register callback to execute.""" - if callback_type in self._stick_callbacks: - self._stick_callbacks[callback_type].remove(callback) - - def allow_join_requests(self, enable: bool, accept: bool): - """Enable or disable Plugwise network - Automatically accept new join request - """ - self.msg_controller.send(NodeAllowJoiningRequest(enable)) - if enable: - self._accept_join_requests = accept - else: - self._accept_join_requests = False - - def scan(self, callback=None): - """Scan and try to detect all registered nodes.""" - self.scan_callback = callback - self.scan_for_registered_nodes() - - def scan_circle_plus(self): - """Scan the Circle+ memory for registered nodes.""" - if self._device_nodes.get(self.circle_plus_mac): - _LOGGER.debug("Scan Circle+ for linked nodes...") - self._device_nodes[self.circle_plus_mac].scan_for_nodes(self.discover_nodes) - else: - _LOGGER.error("Circle+ is not discovered yet") - - def scan_for_registered_nodes(self): - """Discover Circle+ and all registered nodes at Circle+.""" - if self.circle_plus_mac: - if self._device_nodes.get(self.circle_plus_mac): - self.scan_circle_plus() - else: - _LOGGER.debug("Discover Circle+ at %s", self.circle_plus_mac) - self.discover_node(self.circle_plus_mac, self.scan_circle_plus) - else: - _LOGGER.error( - "Plugwise stick not properly initialized, Circle+ MAC is missing." + raise StickError( + "Unable to change port while connected. 
Disconnect first" ) + if self._port is None: + self._port = port + if port != self._port: + self._port = port - def discover_nodes(self, nodes_to_discover): - """Helper to discover all registered nodes.""" - _LOGGER.debug("Scan plugwise network finished") - self._nodes_discovered = 0 - self._nodes_to_discover = nodes_to_discover - self._joined_nodes = len(nodes_to_discover) - - # setup timeout for node discovery - discover_timeout = 10 + (len(nodes_to_discover) * 2) + (MESSAGE_TIME_OUT) - threading.Timer(discover_timeout, self.scan_timeout_expired).start() - _LOGGER.debug("Start discovery of linked node types...") - for mac in nodes_to_discover: - self.discover_node(mac, self.node_discovered_by_scan) - - def node_discovered_by_scan(self, nodes_off_line=False): - """Node discovered by initial scan.""" - if nodes_off_line: - self._nodes_off_line += 1 - self._nodes_discovered += 1 - _LOGGER.debug( - "Discovered Plugwise node %s (%s off-line) of %s", - str(len(self._device_nodes)), - str(self._nodes_off_line), - str(len(self._nodes_to_discover)), - ) - if (len(self._device_nodes) - 1 + self._nodes_off_line) >= len( - self._nodes_to_discover - ): - if self._nodes_off_line == 0: - self._nodes_to_discover = {} - self._nodes_not_discovered = {} - else: - for mac in self._nodes_to_discover: - if not self._device_nodes.get(mac): - _LOGGER.info( - "Failed to discover node type for registered MAC '%s'. This is expected for battery powered nodes, they will be discovered at their first awake", - str(mac), - ) - else: - if mac in self._nodes_not_discovered: - del self._nodes_not_discovered[mac] - self.msg_controller.discovery_finished = True - if self.scan_callback: - self.scan_callback() - - def scan_timeout_expired(self): - """Timeout for initial scan.""" - if not self.msg_controller.discovery_finished: - for mac in self._nodes_to_discover: - # TODO: 20220206 is there 'mac' in the dict? Otherwise it can be rewritten as below (twice as fast above .get) - # if mac not in self._device_nodes: - if not self._device_nodes.get(mac): - _LOGGER.info( - "Failed to discover node type for registered MAC '%s'. 
This is expected for battery powered nodes, they will be discovered at their first awake", - str(mac), - ) - else: - if mac in self._nodes_not_discovered: - del self._nodes_not_discovered[mac] - if self.scan_callback: - self.scan_callback() - - def _append_node(self, mac, address, node_type): - """Add node to list of controllable nodes""" - _LOGGER.debug( - "Add new node type (%s) with mac %s", - str(node_type), - mac, - ) - if node_type == NODE_TYPE_CIRCLE_PLUS: - self._device_nodes[mac] = PlugwiseCirclePlus( - mac, address, self.msg_controller.send - ) - elif node_type == NODE_TYPE_CIRCLE: - self._device_nodes[mac] = PlugwiseCircle( - mac, address, self.msg_controller.send - ) - elif node_type == NODE_TYPE_SWITCH: - self._device_nodes[mac] = None - elif node_type == NODE_TYPE_SENSE: - self._device_nodes[mac] = PlugwiseSense( - mac, address, self.msg_controller.send - ) - elif node_type == NODE_TYPE_SCAN: - self._device_nodes[mac] = PlugwiseScan( - mac, address, self.msg_controller.send - ) - elif node_type == NODE_TYPE_CELSIUS_SED: - self._device_nodes[mac] = None - elif node_type == NODE_TYPE_CELSIUS_NR: - self._device_nodes[mac] = None - elif node_type == NODE_TYPE_STEALTH: - self._device_nodes[mac] = PlugwiseStealth( - mac, address, self.msg_controller.send - ) - else: - _LOGGER.warning("Unsupported node type '%s'", str(node_type)) - self._device_nodes[mac] = None - - # process previous missed messages - msg_to_process = self._messages_for_undiscovered_nodes[:] - self._messages_for_undiscovered_nodes = [] - for msg in msg_to_process: - self.message_processor(msg) - - def node_state_updates(self, mac, state: bool): - """Update availability state of a node""" - if self._device_nodes.get(mac): - if not self._device_nodes[mac].battery_powered: - self._device_nodes[mac].available = state - - def node_join(self, mac: str, callback=None) -> bool: - """Accept node to join Plugwise network by register mac in Circle+ memory""" - if validate_mac(mac): - self.msg_controller.send( - NodeAddRequest(bytes(mac, UTF8_DECODE), True), callback + @property + def accept_join_request(self) -> bool | None: + """Automatically accept joining request of new nodes.""" + if not self._controller.is_connected: + return None + if self._network is None or not self._network.is_running: + return None + return self._network.accept_join_request + + @accept_join_request.setter + def accept_join_request(self, state: bool) -> None: + """Configure join request setting.""" + if not self._controller.is_connected: + raise StickError( + "Cannot accept joining node" + + " without an active USB-Stick connection." ) - return True - _LOGGER.warning("Invalid mac '%s' address, unable to join node manually.", mac) - return False - - def node_unjoin(self, mac: str, callback=None) -> bool: - """Remove node from the Plugwise network by deleting mac from the Circle+ memory""" - if validate_mac(mac): - self.msg_controller.send( - NodeRemoveRequest(bytes(self.circle_plus_mac, UTF8_DECODE), mac), - callback, + if self._network is None or not self._network.is_running: + raise StickError( + "Cannot accept joining node" + + "without node discovery be activated. Call discover() first." 
) - return True + self._network.accept_join_request = state - _LOGGER.warning( - "Invalid mac '%s' address, unable to unjoin node manually.", mac - ) - return False - - def _remove_node(self, mac): - """Remove node from list of controllable nodes.""" - if self._device_nodes.get(mac): - del self._device_nodes[mac] - else: - _LOGGER.warning("Node %s does not exists, unable to remove node.", mac) - - def message_processor(self, message: NodeResponse): - """Received message from Plugwise network.""" - mac = message.mac.decode(UTF8_DECODE) - if isinstance(message, (NodeAckLargeResponse, NodeAckResponse)): - if message.ack_id in STATE_ACTIONS: - self._pass_message_to_node(message, mac) - elif isinstance(message, NodeInfoResponse): - self._process_node_info_response(message, mac) - elif isinstance(message, StickInitResponse): - self._process_stick_init_response(message) - elif isinstance(message, NodeJoinAvailableResponse): - self._process_node_join_request(message, mac) - elif isinstance(message, NodeRemoveResponse): - self._process_node_remove(message) - else: - self._pass_message_to_node(message, mac) - - def _process_stick_init_response(self, stick_init_response: StickInitResponse): - """Process StickInitResponse message.""" - self._mac_stick = stick_init_response.mac - if stick_init_response.network_is_online.value == 1: - self._network_online = True - else: - self._network_online = False - # Replace first 2 characters by 00 for mac of circle+ node - self.circle_plus_mac = "00" + stick_init_response.circle_plus_mac.value[ - 2: - ].decode(UTF8_DECODE) - self._network_id = stick_init_response.network_id.value - self._stick_initialized = True - if not self._run_watchdog: - self._run_watchdog = True - self._watchdog_thread = threading.Thread( - None, self._watchdog_loop, "watchdog_thread", (), {} - ) - self._watchdog_thread.daemon = True - self._watchdog_thread.start() - - def _process_node_info_response(self, node_info_response, mac): - """Process NodeInfoResponse message.""" - if not self._pass_message_to_node(node_info_response, mac, False): - _LOGGER.debug( - "Received NodeInfoResponse from currently unknown node with mac %s with sequence id %s", - mac, - str(node_info_response.seq_id), - ) - if node_info_response.node_type.value == NODE_TYPE_CIRCLE_PLUS: - self._circle_plus_discovered = True - self._append_node(mac, 0, node_info_response.node_type.value) - if mac in self._nodes_not_discovered: - del self._nodes_not_discovered[mac] - else: - if mac in self._nodes_to_discover: - _LOGGER.info( - "Node with mac %s discovered", - mac, - ) - self._append_node( - mac, - self._nodes_to_discover[mac], - node_info_response.node_type.value, - ) - self._pass_message_to_node(node_info_response, mac) - - def _process_node_join_request(self, node_join_request, mac): - """Process NodeJoinAvailableResponse message from a node that - is not part of a plugwise network yet and wants to join + async def clear_cache(self) -> None: + """Clear current cache.""" + if self._network is not None: + await self._network.clear_cache() + + def subscribe_to_stick_events( + self, + stick_event_callback: Callable[[StickEvent], Coroutine[Any, Any, None]], + events: tuple[StickEvent], + ) -> Callable[[], None]: + """Subscribe callback when specified StickEvent occurs. + + Returns the function to be called to unsubscribe later. 
""" - if self._device_nodes.get(mac): - _LOGGER.debug( - "Received node available message for node %s which is already joined.", - mac, - ) - else: - if self._accept_join_requests: - # Send accept join request - _LOGGER.info( - "Accepting network join request for node with mac %s", - mac, - ) - self.msg_controller.send(NodeAddRequest(node_join_request.mac, True)) - self._nodes_not_discovered[mac] = (None, None) - else: - _LOGGER.debug( - "New node with mac %s requesting to join Plugwise network, do callback", - mac, - ) - self.do_callback(CB_JOIN_REQUEST, mac) - - def _process_node_remove(self, node_remove_response): - """Process NodeRemoveResponse message with confirmation - if node is is removed from the Plugwise network. + return self._controller.subscribe_to_stick_events( + stick_event_callback, + events, + ) + + @raise_not_initialized + def subscribe_to_node_events( + self, + node_event_callback: Callable[[NodeEvent, str], Coroutine[Any, Any, None]], + events: tuple[NodeEvent, ...], + ) -> Callable[[], None]: + """Subscribe callback to be called when specific NodeEvent occurs. + + Returns the function to be called to unsubscribe later. """ - unjoined_mac = node_remove_response.node_mac_id.value - if node_remove_response.status.value == 1: - if self._device_nodes.get(unjoined_mac): - del self._device_nodes[unjoined_mac] - _LOGGER.info( - "Received NodeRemoveResponse from node %s it has been unjoined from Plugwise network", - unjoined_mac, - ) - else: - _LOGGER.debug( - "Unknown node with mac %s has been unjoined from Plugwise network", - unjoined_mac, - ) - else: - _LOGGER.warning( - "Node with mac %s failed to unjoin from Plugwise network ", - unjoined_mac, - ) + if self._network is None: + raise SubscriptionError("Unable to subscribe to node events without network connection initialized") + return self._network.subscribe_to_node_events( + node_event_callback, + events, + ) - def _pass_message_to_node(self, message, mac, discover=True): - """Pass message to node class to take action on message + def _validate_node_discovery(self) -> None: + """Validate if network discovery is running. - Returns True if message has passed onto existing known node + Raises StickError if network is not active. """ - if self._device_nodes.get(mac): - self._device_nodes[mac].message_for_node(message) - return True + if self._network is None or not self._network.is_running: + raise StickError("Plugwise network node discovery is not active.") + + async def setup( + self, discover: bool = True, load: bool = True + ) -> None: + """Fully connect, initialize USB-Stick and discover all connected nodes.""" + if not self.is_connected: + await self.connect() + if not self.is_initialized: + await self.initialize() if discover: - _LOGGER.info( - "Queue %s from %s because node is not discovered yet.", - message.__class__.__name__, - mac, + await self.start_network() + await self.discover_coordinator() + await self.discover_nodes() + if load: + await self.load_nodes() + + async def connect(self, port: str | None = None) -> None: + """Connect to USB-Stick. Raises StickError if connection fails.""" + if self._controller.is_connected: + raise StickError( + f"Already connected to {self._port}, " + + "Close existing connection before (re)connect." 
) - self._messages_for_undiscovered_nodes.append(message) - self.discover_node(mac, self._discover_after_scan, True) - return False - - def _watchdog_loop(self): - """Main worker loop to watch all other worker threads""" - time.sleep(5) - circle_plus_retry_counter = 0 - while self._run_watchdog: - # Connection - if self.msg_controller.connection.is_connected(): - # Connection reader daemon - if not self.msg_controller.connection.read_thread_alive(): - _LOGGER.warning("Unexpected halt of connection reader thread") - # Connection writer daemon - if not self.msg_controller.connection.write_thread_alive(): - _LOGGER.warning("Unexpected halt of connection writer thread") - # receive timeout daemon - if ( - self.msg_controller.receive_timeout_thread_state - and self.msg_controller.receive_timeout_thread_is_alive - ): - self.msg_controller.restart_receive_timeout_thread() - # send message daemon - if ( - self.msg_controller.send_message_thread_state - and self.msg_controller.send_message_thread_is_alive - ): - self.msg_controller.restart_send_message_thread() - # Update daemon - if self._run_update_thread: - if not self._update_thread.is_alive(): - _LOGGER.warning( - "Unexpected halt of update thread, restart thread", - ) - self._run_update_thread = True - self._update_thread = threading.Thread( - None, self._update_loop, "update_thread", (), {} - ) - self._update_thread.daemon = True - self._update_thread.start() - # Circle+ discovery - if not self._circle_plus_discovered: - # First hour every once an hour - if self._circle_plus_retries < 60 or circle_plus_retry_counter > 60: - _LOGGER.info( - "Circle+ not yet discovered, resubmit discovery request" - ) - self.discover_node(self.circle_plus_mac, self.scan) - self._circle_plus_retries += 1 - circle_plus_retry_counter = 0 - circle_plus_retry_counter += 1 - watchdog_loop_checker = 0 - while watchdog_loop_checker < WATCHDOG_DEAMON and self._run_watchdog: - time.sleep(1) - watchdog_loop_checker += 1 - _LOGGER.debug("watchdog loop stopped") - - def _update_loop(self): - """When node has not received any message during - last 2 update polls, reset availability - """ - self._run_update_thread = True - _discover_counter = 0 - try: - while self._run_update_thread: - for mac, device in self._device_nodes.items(): - if device: - if device.battery_powered: - # Check availability state of SED's - self._check_availability_of_seds(mac) - elif device.measures_power: - # Request current power usage of those that reply on ping - device.do_ping(device.request_power_update) - else: - # Do ping request for all non SED's - device.do_ping() - - # Do a single ping for undiscovered nodes once per 10 update cycles - if _discover_counter == 10: - for mac in self._nodes_not_discovered: - self.msg_controller.send( - NodePingRequest(bytes(mac, UTF8_DECODE)), - None, - -1, - PRIORITY_LOW, - ) - _discover_counter = 0 - else: - _discover_counter += 1 - - if self._auto_update_timer and self._run_update_thread: - update_loop_checker = 0 - while ( - update_loop_checker < self._auto_update_timer - and self._run_update_thread - ): - time.sleep(1) - update_loop_checker += 1 - - # TODO: narrow exception - except Exception as err: # pylint: disable=broad-except - _exc_type, _exc_obj, exc_tb = sys.exc_info() - _LOGGER.error( - "Error at line %s of _update_loop : %s", exc_tb.tb_lineno, err + if port is not None: + self._port = port + + if self._port is None: + raise StickError( + "Unable to connect. 
" + + "Path to USB-Stick is not defined, set port property first" ) - _LOGGER.debug("Update loop stopped") - - def auto_update(self, timer=None): - """Configure auto update polling daemon for power usage and availability state.""" - if timer: - self._auto_update_timer = timer - self._auto_update_manually = True - elif timer == 0: - self._auto_update_timer = 0 - self._run_update_thread = False - else: - # Timer based on a minimum of 5 seconds + 1 second for each node supporting power measurement - if not self._auto_update_manually: - count_nodes = 0 - for _, node in self._device_nodes.items(): - if node.measures_power: - count_nodes += 1 - self._auto_update_timer = 5 + (count_nodes * 1) - _LOGGER.info( - "Update interval is (re)set to %s seconds", - str(self._auto_update_timer), - ) - if not self._run_update_thread: - self._update_thread.start() - - # Helper functions - def do_callback(self, callback_type, callback_arg=None): - """Helper to execute registered callbacks for specified callback type.""" - if callback_type in self._stick_callbacks: - for callback in self._stick_callbacks[callback_type]: - try: - if callback_arg is None: - callback() - else: - callback(callback_arg) - # TODO: narrow exception - except Exception as err: # pylint: disable=broad-except - _LOGGER.error("Error while executing callback : %s", err) - - def _check_availability_of_seds(self, mac): - """Helper to check if SED device is still sending its hartbeat.""" - if self._device_nodes[mac].available: - if self._device_nodes[mac].last_update < ( - datetime.now() - - timedelta(minutes=(self._device_nodes[mac].maintenance_interval + 1)) - ): - _LOGGER.info( - "No messages received within (%s minutes) of expected maintenance interval from node %s, mark as unavailable [%s > %s]", - str(self._device_nodes[mac].maintenance_interval), - mac, - str(self._device_nodes[mac].last_update), - str( - datetime.now() - - timedelta( - minutes=(self._device_nodes[mac].maintenance_interval + 1) - ) - ), - ) - self._device_nodes[mac].available = False - - def _discover_after_scan(self): - """Helper to do callback for new node.""" - node_discovered = None - for mac in self._nodes_not_discovered: - if self._device_nodes.get(mac): - node_discovered = mac - break - if node_discovered: - del self._nodes_not_discovered[node_discovered] - self.do_callback(CB_NEW_NODE, node_discovered) - self.auto_update() - - def discover_node(self, mac: str, callback=None, force_discover=False): - """Helper to try to discovery the node (type) based on mac.""" - if not validate_mac(mac) or self._device_nodes.get(mac): - return - if mac not in self._nodes_not_discovered: - self._nodes_not_discovered[mac] = ( - None, - None, + await self._controller.connect_to_stick( + self._port, + ) + + @raise_not_connected + async def initialize(self, create_root_cache_folder: bool = False) -> None: + """Initialize connection to USB-Stick.""" + await self._controller.initialize_stick() + if self._network is None: + self._network = StickNetwork(self._controller) + self._network.cache_folder = self._cache_folder + self._network.cache_folder_create = create_root_cache_folder + self._network.cache_enabled = self._cache_enabled + if self._cache_enabled: + await self._network.initialize_cache() + + @raise_not_connected + @raise_not_initialized + async def start_network(self) -> None: + """Start zigbee network.""" + if self._network is None: + self._network = StickNetwork(self._controller) + self._network.cache_folder = self._cache_folder + self._network.cache_enabled = 
self._cache_enabled + if self._cache_enabled: + await self._network.initialize_cache() + await self._network.start() + + @raise_not_connected + @raise_not_initialized + async def load_nodes(self) -> bool: + """Load all discovered nodes.""" + if self._network is None: + raise StickError( + "Cannot load nodes when network is not initialized" ) - self.msg_controller.send( - NodeInfoRequest(bytes(mac, UTF8_DECODE)), - callback, + if not self._network.is_running: + raise StickError( + "Cannot load nodes when network is not started" ) - else: - (firstrequest, lastrequest) = self._nodes_not_discovered[mac] - if not (firstrequest and lastrequest): - self.msg_controller.send( - NodeInfoRequest(bytes(mac, UTF8_DECODE)), - callback, - 0, - PRIORITY_LOW, - ) - elif force_discover: - self.msg_controller.send( - NodeInfoRequest(bytes(mac, UTF8_DECODE)), - callback, - ) + return await self._network.discover_nodes(load=True) + + @raise_not_connected + @raise_not_initialized + async def discover_coordinator(self, load: bool = False) -> None: + """Discover the network coordinator.""" + if self._network is None: + raise StickError( + "Cannot load nodes when network is not initialized" + ) + await self._network.discover_network_coordinator(load=load) + + @raise_not_connected + @raise_not_initialized + async def discover_nodes(self, load: bool = False) -> None: + """Discover all nodes.""" + if self._network is None: + raise StickError( + "Cannot load nodes when network is not initialized" + ) + await self._network.discover_nodes(load=load) + + @raise_not_connected + @raise_not_initialized + async def register_node(self, mac: str) -> bool: + """Add node to plugwise network.""" + if self._network is None: + return False + return await self._network.register_node(mac) + + @raise_not_connected + @raise_not_initialized + async def unregister_node(self, mac: str) -> None: + """Remove node to plugwise network.""" + if self._network is None: + return + await self._network.unregister_node(mac) + + async def disconnect(self) -> None: + """Disconnect from USB-Stick.""" + if self._network is not None: + await self._network.stop() + await self._controller.disconnect_from_stick() diff --git a/plugwise_usb/api.py b/plugwise_usb/api.py new file mode 100644 index 000000000..bbbfdaa6c --- /dev/null +++ b/plugwise_usb/api.py @@ -0,0 +1,684 @@ +"""Plugwise USB-Stick API.""" + +from dataclasses import dataclass +from datetime import datetime +from enum import Enum, auto +import logging +from typing import Any, Protocol + +_LOGGER = logging.getLogger(__name__) + + +class StickEvent(Enum): + """Plugwise USB Stick events for callback subscription.""" + + CONNECTED = auto() + DISCONNECTED = auto() + MESSAGE_RECEIVED = auto() + NETWORK_OFFLINE = auto() + NETWORK_ONLINE = auto() + + +class MotionSensitivity(Enum): + """Motion sensitivity levels for Scan devices.""" + + HIGH = auto() + MEDIUM = auto() + OFF = auto() + + +class NodeEvent(Enum): + """Plugwise Node events for callback subscription.""" + + AWAKE = auto() + DISCOVERED = auto() + LOADED = auto() + JOIN = auto() + + +class NodeFeature(str, Enum): + """USB Stick Node feature.""" + + AVAILABLE = "available" + BATTERY = "battery" + ENERGY = "energy" + HUMIDITY = "humidity" + INFO = "info" + MOTION = "motion" + MOTION_CONFIG = "motion_config" + PING = "ping" + POWER = "power" + RELAY = "relay" + RELAY_INIT = "relay_init" + SWITCH = "switch" + TEMPERATURE = "temperature" + + +class NodeType(Enum): + """USB Node types.""" + + STICK = 0 + CIRCLE_PLUS = 1 # AME_NC + CIRCLE = 2 # AME_NR 
+ SWITCH = 3 # AME_SEDSwitch + SENSE = 5 # AME_SEDSense + SCAN = 6 # AME_SEDScan + CELSIUS_SED = 7 # AME_CelsiusSED + CELSIUS_NR = 8 # AME_CelsiusNR + STEALTH = 9 # AME_STEALTH_ZE + + +# 10 AME_MSPBOOTLOAD +# 11 AME_STAR + + +PUSHING_FEATURES = ( + NodeFeature.AVAILABLE, + NodeFeature.BATTERY, + NodeFeature.HUMIDITY, + NodeFeature.MOTION, + NodeFeature.MOTION_CONFIG, + NodeFeature.TEMPERATURE, + NodeFeature.SWITCH, +) + + +@dataclass(frozen=True) +class AvailableState: + """Availability of node. + + Description: Availability of node on Zigbee network. + + Attributes: + state: bool: Indicate if node is operational (True) or off-line (False). Battery powered nodes which are in sleeping mode report to be operational. + last_seen: datetime: Last time a messages was received from the Node. + + """ + + state: bool + last_seen: datetime + + +@dataclass(frozen=True) +class BatteryConfig: + """Battery related configuration settings. + + Description: Configuration settings for battery powered devices. + + Attributes: + awake_duration: int | None: Duration in seconds a battery powered devices is awake to accept (configuration) messages. + clock_interval: int | None: Interval in minutes a battery powered devices is synchronizing its clock. + clock_sync: bool | None: Indicate if the internal clock must be synced. + maintenance_interval: int | None: Interval in minutes a battery powered devices is awake for maintenance purposes. + sleep_duration: int | None: Interval in minutes a battery powered devices is sleeping. + + """ + + awake_duration: int | None = None + clock_interval: int | None = None + clock_sync: bool | None = None + maintenance_interval: int | None = None + sleep_duration: int | None = None + + +@dataclass +class NodeInfo: + """Node hardware information.""" + + mac: str + zigbee_address: int + is_battery_powered: bool = False + features: tuple[NodeFeature, ...] = (NodeFeature.INFO,) + firmware: datetime | None = None + name: str | None = None + model: str | None = None + model_type: str | None = None + node_type: NodeType | None = None + timestamp: datetime | None = None + version: str | None = None + + +@dataclass +class NetworkStatistics: + """Zigbee network information.""" + + timestamp: datetime | None = None + rssi_in: int | None = None + rssi_out: int | None = None + rtt: int | None = None + + +@dataclass +class PowerStatistics: + """Power statistics collection.""" + + last_second: float | None = None + last_8_seconds: float | None = None + timestamp: datetime | None = None + + +@dataclass(frozen=True) +class RelayConfig: + """Configuration of relay. + + Description: Configuration settings for relay. + + Attributes: + init_state: bool | None: Configured state at which the relay must be at initial power-up of device. + + """ + + init_state: bool | None = None + + +@dataclass(frozen=True) +class RelayState: + """Status of relay.""" + + state: bool | None = None + timestamp: datetime | None = None + + +@dataclass(frozen=True) +class MotionState: + """Status of motion sensor.""" + + state: bool | None = None + timestamp: datetime | None = None + + +@dataclass(frozen=True) +class MotionConfig: + """Configuration of motion sensor. + + Description: Configuration settings for motion detection. + When value is scheduled to be changed the returned value is the optimistic value + + Attributes: + reset_timer: int | None: Motion reset timer in minutes before the motion detection is switched off. + daylight_mode: bool | None: Motion detection when light level is below threshold. 
+ sensitivity_level: MotionSensitivity | None: Motion sensitivity level. + + """ + + daylight_mode: bool | None = None + reset_timer: int | None = None + sensitivity_level: MotionSensitivity | None = None + + +@dataclass +class EnergyStatistics: + """Energy statistics collection.""" + + log_interval_consumption: int | None = None + log_interval_production: int | None = None + hour_consumption: float | None = None + hour_consumption_reset: datetime | None = None + day_consumption: float | None = None + day_consumption_reset: datetime | None = None + week_consumption: float | None = None + week_consumption_reset: datetime | None = None + hour_production: float | None = None + hour_production_reset: datetime | None = None + day_production: float | None = None + day_production_reset: datetime | None = None + week_production: float | None = None + week_production_reset: datetime | None = None + + +class PlugwiseNode(Protocol): + """Protocol definition of a Plugwise device node.""" + + # region Generic node properties + @property + def features(self) -> tuple[NodeFeature, ...]: + """Supported feature types of node.""" + + @property + def is_battery_powered(self) -> bool: + """Indicate if node is powered by battery.""" + + @property + def is_loaded(self) -> bool: + """Indicate if node is loaded and available to interact.""" + + @property + def name(self) -> str: + """Return name of node.""" + + @property + def node_info(self) -> NodeInfo: + """Return NodeInfo class with all node information.""" + + # endregion + async def load(self) -> bool: + """Load configuration and activate node features.""" + + async def unload(self) -> None: + """Unload and deactivate node.""" + + # region Network properties + @property + def available(self) -> bool: + """Last known network availability state.""" + + @property + def available_state(self) -> AvailableState: + """Network availability state.""" + + @property + def last_seen(self) -> datetime: + """Timestamp of last network activity.""" + + @property + def mac(self) -> str: + """Zigbee mac address.""" + + @property + def network_address(self) -> int: + """Zigbee network registration address.""" + + @property + def ping_stats(self) -> NetworkStatistics: + """Ping statistics for node.""" + + # endregion + + async def is_online(self) -> bool: + """Check network status of node.""" + + # TODO: Move to node with subscription to stick event + async def reconnect(self) -> None: + """Reconnect node to Plugwise Zigbee network.""" + + # TODO: Move to node with subscription to stick event + async def disconnect(self) -> None: + """Disconnect from Plugwise Zigbee network.""" + + # region Cache settings + @property + def cache_folder(self) -> str: + """Path to cache folder.""" + + @cache_folder.setter + def cache_folder(self, cache_folder: str) -> None: + """Path to cache folder.""" + + @property + def cache_folder_create(self) -> bool: + """Create cache folder when it does not exists.""" + + @cache_folder_create.setter + def cache_folder_create(self, enable: bool = True) -> None: + """Create cache folder when it does not exists.""" + + @property + def cache_enabled(self) -> bool: + """Activate caching of retrieved information.""" + + @cache_enabled.setter + def cache_enabled(self, enable: bool) -> None: + """Activate caching of retrieved information.""" + + async def clear_cache(self) -> None: + """Clear currently cached information.""" + + async def save_cache( + self, trigger_only: bool = True, full_write: bool = False + ) -> None: + """Write currently cached information 
to cache file.""" + + # endregion + + # region Sensors + @property + def energy(self) -> EnergyStatistics: + """Energy statistics. + + Raises NodeError when energy feature is not present at device. + """ + + @property + def humidity(self) -> float: + """Last received humidity state. + + Raises NodeError when humidity feature is not present at device. + """ + + @property + def motion(self) -> bool | None: + """Current state of motion detection. + + Raises NodeError when motion feature is not present at device. + """ + + @property + def motion_state(self) -> MotionState: + """Last known motion state information. + + Raises NodeError when motion feature is not present at device. + """ + + @property + def power(self) -> PowerStatistics: + """Current power statistics. + + Raises NodeError when power feature is not present at device. + """ + + @property + def relay(self) -> bool: + """Current state of relay. + + Raises NodeError when relay feature is not present at device. + """ + + @property + def relay_state(self) -> RelayState: + """Last known relay state information. + + Raises NodeError when relay feature is not present at device. + """ + + @property + def switch(self) -> bool: + """Current state of the switch. + + Raises NodeError when switch feature is not present at device. + """ + + @property + def temperature(self) -> float: + """Last received temperature state. + + Raises NodeError when temperature feature is not present at device. + """ + + # endregion + + # region control + async def get_state(self, features: tuple[NodeFeature]) -> dict[NodeFeature, Any]: + """Request an updated state for given feature. + + Returns the state or statistics for each requested feature. + """ + + # endregion + + # region Actions to execute + async def set_relay(self, state: bool) -> bool: + """Change the state of the relay. + + Description: + Configures the state of the relay. + + Args: + state: Boolean indicating the required state of the relay (True = ON, False = OFF) + + Returns: + Boolean: with the newly set state of the relay + + Raises: + FeatureError: When the relay feature is not present at device. + NodeError: When the node is not yet loaded or setting the state failed. + + """ + + # endregion + + # region configuration properties + + @property + def battery_config(self) -> BatteryConfig: + """Battery configuration settings. + + Returns: + BatteryConfig: Currently configured battery settings. + When settings are scheduled to be changed it will return the new settings. + + Raises: + FeatureError: When this configuration feature is not present at device. + NodeError: When the node is not yet loaded or configuration failed. + + """ + + @property + def motion_config(self) -> MotionConfig: + """Motion configuration settings. + + Returns: + MotionConfig: with the current motion configuration settings. + + Raises: + FeatureError: When this configuration feature is not present at device. + NodeError: When the node is not yet loaded or configuration failed. + + """ + + @property + def relay_config(self) -> RelayConfig: + """Relay configuration settings. + + Returns: + RelayConfig: Current relay configuration settings. + + Raises: + FeatureError: When this configuration feature is not present at device. + NodeError: When the node is not yet loaded or configuration failed. + + """ + + # endregion + + # region Configuration actions + async def set_awake_duration(self, seconds: int) -> bool: + """Change the awake duration. 
+ + Description: + Configure the duration for a battery powered device (Sleeping Endpoint Device) to be awake. + The configuration will be set the next time the device is awake for maintenance purposes. + + Use the 'is_battery_powered' property to determine if the device is battery powered. + + Args: + seconds: Number of seconds between each time the device must wake-up for maintenance purposes + Minimum value: 1 + Maximum value: 255 + + Returns: + Boolean: True when the configuration is successfully scheduled to be changed. False when + the configuration is already set. + + Raises: + FeatureError: When this configuration feature is not present at device. + NodeError: When the node is not yet loaded or configuration failed. + ValueError: When the seconds value is out of range. + + """ + + async def set_clock_interval(self, minutes: int) -> bool: + """Change the clock interval. + + Description: + Configure the duration for a battery powered device (Sleeping Endpoint Device) to synchronize the internal clock. + Use the 'is_battery_powered' property to determine if the device is battery powered. + + Args: + minutes: Number of minutes between each time the device must synchronize the clock + Minimum value: 1 + Maximum value: 65535 + + Returns: + Boolean: True when the configuration is successfully scheduled to be changed. False when + the configuration is already set. + + Raises: + FeatureError: When this configuration feature is not present at device. + NodeError: When the node is not yet loaded or configuration failed. + ValueError: When the minutes value is out of range. + + """ + + async def set_clock_sync(self, sync: bool) -> bool: + """Change the clock synchronization setting. + + Description: + Configure the duration for a battery powered device (Sleeping Endpoint Device) to synchronize the internal clock. + Use the 'is_battery_powered' property to determine if the device is battery powered. + + Args: + sync: Boolean indicating the internal clock must be synced (True = sync enabled, False = sync disabled) + + Returns: + Boolean: True when the configuration is successfully scheduled to be changed. False when + the configuration is already set. + + Raises: + FeatureError: When this configuration feature is not present at device. + NodeError: When the node is not yet loaded or configuration failed. + + """ + + async def set_maintenance_interval(self, minutes: int) -> bool: + """Change the maintenance interval. + + Description: + Configure the maintenance interval for a battery powered device (Sleeping Endpoint Device). + The configuration will be set the next time the device is awake for maintenance purposes. + + Use the 'is_battery_powered' property to determine if the device is battery powered. + + Args: + minutes: Number of minutes between each time the device must wake-up for maintenance purposes + Minimum value: 1 + Maximum value: 1440 + + Returns: + Boolean: True when the configuration is successfully scheduled to be changed. False when + the configuration is already set. + + Raises: + FeatureError: When this configuration feature is not present at device. + NodeError: When the node is not yet loaded or configuration failed. + ValueError: When the seconds value is out of range. + + """ + + async def set_motion_daylight_mode(self, state: bool) -> bool: + """Configure motion daylight mode. + + Description: + Configure if motion must be detected when light level is below threshold. 
+ + Args: + state: Boolean indicating the required state (True = ON, False = OFF) + + Returns: + Boolean: with the newly configured state of the daylight mode + + Raises: + FeatureError: When the daylight mode feature is not present at device. + NodeError: When the node is not yet loaded or configuration failed. + + """ + + async def set_motion_reset_timer(self, minutes: int) -> bool: + """Configure the motion reset timer in minutes. + + Description: + Configure the duration in minutes a Scan device must not detect motion before reporting no motion. + The configuration will be set the next time the device is awake for maintenance purposes. + + Use the 'is_battery_powered' property to determine if the device is battery powered. + + Args: + minutes: Number of minutes before the motion detection is switched off + Minimum value: 1 + Maximum value: 255 + + Returns: + Boolean: True when the configuration is successfully scheduled to be changed. False when + the configuration is already set. + + Raises: + FeatureError: When this configuration feature is not present at device. + NodeError: When the node is not yet loaded or configuration failed. + ValueError: When the seconds value is out of range. + + """ + + async def set_motion_sensitivity_level(self, level: MotionSensitivity) -> bool: + """Configure motion sensitivity level. + + Description: + Configure the sensitivity level of motion detection. + + Args: + level: MotionSensitivity indicating the required sensitivity level + + Returns: + Boolean: True when the configuration is successfully scheduled to be changed. False when + the configuration is already set. + + Raises: + FeatureError: When the motion sensitivity feature is not present at device. + NodeError: When the node is not yet loaded or configuration failed. + + """ + + async def set_relay_init(self, state: bool) -> bool: + """Change the initial state of the relay. + + Description: + Configures the state of the relay to be directly after power-up of the device. + + Args: + state: Boolean indicating the required state of the relay (True = ON, False = OFF) + + Returns: + Boolean: with the newly configured state of the relay + + Raises: + FeatureError: When the initial (power-up) relay configure feature is not present at device. + NodeError: When the node is not yet loaded or configuration failed. + + """ + + async def set_sleep_duration(self, minutes: int) -> bool: + """Change the sleep duration. + + Description: + Configure the duration for a battery powered device (Sleeping Endpoint Device) to sleep. + Use the 'is_battery_powered' property to determine if the device is battery powered. + + Args: + minutes: Number of minutes to sleep + Minimum value: 1 + Maximum value: 65535 + + Returns: + Boolean: True when the configuration is successfully scheduled to be changed. False when + the configuration is already set. + + Raises: + FeatureError: When this configuration feature is not present at device. + NodeError: When the node is not yet loaded or configuration failed. + ValueError: When the minutes value is out of range. + + """ + + # endregion + + # region Helper functions + async def message_for_node(self, message: Any) -> None: + """Process message for node. + + Description: Submit a plugwise message for this node. + + Args: + message: Plugwise message to process. 
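+
+        Example:
+            Illustrative sketch only; ``node`` and ``response`` are placeholder
+            names for an already loaded node object and a deserialized
+            PlugwiseResponse addressed to that node::
+
+                await node.message_for_node(response)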
+ + """ + + + # endregion diff --git a/plugwise_usb/connection/__init__.py b/plugwise_usb/connection/__init__.py new file mode 100644 index 000000000..bb2559b3c --- /dev/null +++ b/plugwise_usb/connection/__init__.py @@ -0,0 +1,257 @@ +"""Manage the connection and communication flow through the USB-Stick.""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable, Coroutine +import logging +from typing import Any + +from ..api import StickEvent +from ..constants import UTF8 +from ..exceptions import NodeError, StickError +from ..helpers.util import version_to_model +from ..messages.requests import ( + NodeInfoRequest, + NodePingRequest, + PlugwiseRequest, + StickInitRequest, +) +from ..messages.responses import ( + NodeInfoResponse, + NodePingResponse, + PlugwiseResponse, + StickInitResponse, +) +from .manager import StickConnectionManager +from .queue import StickQueue + +_LOGGER = logging.getLogger(__name__) + + +class StickController: + """Manage the connection and communication towards USB-Stick.""" + + def __init__(self) -> None: + """Initialize Stick controller.""" + self._manager = StickConnectionManager() + self._queue = StickQueue() + self._unsubscribe_stick_event: Callable[[], None] | None = None + self._init_sequence_id: bytes | None = None + self._is_initialized = False + self._fw_stick: str | None = None + self._hw_stick: str | None = None + self._mac_stick: str | None = None + self._mac_nc: str | None = None + self._network_id: int | None = None + self._network_online = False + self.stick_name: str | None = None + + @property + def is_initialized(self) -> bool: + """Returns True if UBS-Stick connection is active and initialized.""" + if not self._manager.is_connected: + return False + return self._is_initialized + + @property + def is_connected(self) -> bool: + """Return connection state from connection manager.""" + return self._manager.is_connected + + @property + def firmware_stick(self) -> str | None: + """Firmware version of the Stick.""" + return self._fw_stick + + @property + def hardware_stick(self) -> str | None: + """Hardware version of the Stick.""" + return self._hw_stick + + @property + def mac_stick(self) -> str: + """MAC address of USB-Stick. Raises StickError when not connected.""" + if not self._manager.is_connected or self._mac_stick is None: + raise StickError( + "No mac address available. Connect and initialize USB-Stick first." + ) + return self._mac_stick + + @property + def mac_coordinator(self) -> str: + """Return MAC address of the Zigbee network coordinator (Circle+). + + Raises StickError when not connected. + """ + if not self._manager.is_connected or self._mac_nc is None: + raise StickError( + "No mac address available. Connect and initialize USB-Stick first." + ) + return self._mac_nc + + @property + def network_id(self) -> int: + """Returns the Zigbee network ID. Raises StickError when not connected.""" + if not self._manager.is_connected or self._network_id is None: + raise StickError( + "No network ID available. Connect and initialize USB-Stick first." + ) + return self._network_id + + @property + def network_online(self) -> bool: + """Return the network state.""" + if not self._manager.is_connected: + raise StickError( + "Network status not available. Connect and initialize USB-Stick first." 
+            )
+        return self._network_online
+
+    async def connect_to_stick(self, serial_path: str) -> None:
+        """Connect to USB stick."""
+        if self._manager.is_connected:
+            raise StickError("Already connected")
+        await self._manager.setup_connection_to_stick(serial_path)
+        if self._unsubscribe_stick_event is None:
+            self._unsubscribe_stick_event = self._manager.subscribe_to_stick_events(
+                self._handle_stick_event,
+                (StickEvent.CONNECTED, StickEvent.DISCONNECTED),
+            )
+        self._queue.start(self._manager)
+
+    def subscribe_to_stick_events(
+        self,
+        stick_event_callback: Callable[[StickEvent], Awaitable[None]],
+        events: tuple[StickEvent, ...],
+    ) -> Callable[[], None]:
+        """Subscribe callback when specified StickEvent occurs.
+
+        Returns the function to be called to unsubscribe later.
+        """
+        if self._manager is None:
+            raise StickError("Connect to stick before subscribing to events")
+        return self._manager.subscribe_to_stick_events(
+            stick_event_callback,
+            events,
+        )
+
+    async def subscribe_to_messages(
+        self,
+        node_response_callback: Callable[[PlugwiseResponse], Coroutine[Any, Any, bool]],
+        mac: bytes | None = None,
+        message_ids: tuple[bytes] | None = None,
+        seq_id: bytes | None = None,
+    ) -> Callable[[], None]:
+        """Subscribe an awaitable callback to be called when a specific message is received.
+
+        Returns the function to unsubscribe.
+        """
+        return await self._manager.subscribe_to_messages(
+            node_response_callback, mac, message_ids, seq_id
+        )
+
+    async def _handle_stick_event(self, event: StickEvent) -> None:
+        """Handle stick event."""
+        if event == StickEvent.CONNECTED:
+            if not self._queue.is_running:
+                self._queue.start(self._manager)
+            await self.initialize_stick()
+        elif event == StickEvent.DISCONNECTED:
+            if self._queue.is_running:
+                await self._queue.stop()
+
+    async def initialize_stick(self) -> None:
+        """Initialize connection to the USB-stick."""
+        if not self._manager.is_connected:
+            raise StickError(
+                "Cannot initialize USB-stick, connect to the USB-stick first"
+            )
+        if not self._queue.is_running:
+            raise StickError("Cannot initialize, queue manager not running")
+
+        try:
+            request = StickInitRequest(self.send)
+            init_response: StickInitResponse | None = await request.send()
+        except StickError as err:
+            raise StickError(
+                "No response from USB-Stick to initialization request."
+                + " Validate USB-stick is connected to port "
+                + f"' {self._manager.serial_path}'"
+            ) from err
+        if init_response is None:
+            raise StickError(
+                "No response from USB-Stick to initialization request."
+ + " Validate USB-stick is connected to port " + + f"' {self._manager.serial_path}'" + ) + self._mac_stick = init_response.mac_decoded + self.stick_name = f"Stick {self._mac_stick[-5:]}" + self._network_online = init_response.network_online + + # Replace first 2 characters by 00 for mac of circle+ node + self._mac_nc = init_response.mac_network_controller + self._network_id = init_response.network_id + self._is_initialized = True + + # Add Stick NodeInfoRequest + node_info, _ = await self.get_node_details(self._mac_stick, ping_first=False) + if node_info is not None: + self._fw_stick = node_info.firmware + hardware, _ = version_to_model(node_info.hardware) + self._hw_stick = hardware + + if not self._network_online: + raise StickError("Zigbee network connection to Circle+ is down.") + + async def get_node_details( + self, mac: str, ping_first: bool + ) -> tuple[NodeInfoResponse | None, NodePingResponse | None]: + """Return node discovery type.""" + ping_response: NodePingResponse | None = None + if ping_first: + # Define ping request with one retry + ping_request = NodePingRequest( + self.send, bytes(mac, UTF8), retries=1 + ) + try: + ping_response = await ping_request.send(suppress_node_errors=True) + except StickError: + return (None, None) + if ping_response is None: + return (None, None) + + info_request = NodeInfoRequest( + self.send, bytes(mac, UTF8), retries=1 + ) + try: + info_response = await info_request.send() + except StickError: + return (None, None) + return (info_response, ping_response) + + async def send( + self, request: PlugwiseRequest, suppress_node_errors: bool = True + ) -> PlugwiseResponse | None: + """Submit request to queue and return response.""" + if not suppress_node_errors: + return await self._queue.submit(request) + try: + return await self._queue.submit(request) + except (NodeError, StickError): + return None + + def _reset_states(self) -> None: + """Reset internal connection information.""" + self._mac_stick = None + self._mac_nc = None + self._network_id = None + self._network_online = False + + async def disconnect_from_stick(self) -> None: + """Disconnect from USB-Stick.""" + if self._unsubscribe_stick_event is not None: + self._unsubscribe_stick_event() + self._unsubscribe_stick_event = None + if self._queue.is_running: + await self._queue.stop() + await self._manager.disconnect_from_stick() diff --git a/plugwise_usb/connection/manager.py b/plugwise_usb/connection/manager.py new file mode 100644 index 000000000..74f1203e3 --- /dev/null +++ b/plugwise_usb/connection/manager.py @@ -0,0 +1,196 @@ +"""Manage the communication flow through the USB-Stick towards the Plugwise (propriety) Zigbee like network.""" + +from __future__ import annotations + +from asyncio import Future, gather, get_event_loop, wait_for +from collections.abc import Awaitable, Callable, Coroutine +import logging +from typing import Any + +from serial import EIGHTBITS, PARITY_NONE, STOPBITS_ONE, SerialException +from serial_asyncio_fast import SerialTransport, create_serial_connection + +from ..api import StickEvent +from ..exceptions import StickError +from ..messages.requests import PlugwiseRequest +from ..messages.responses import PlugwiseResponse +from .receiver import StickReceiver +from .sender import StickSender + +_LOGGER = logging.getLogger(__name__) + + +class StickConnectionManager: + """Manage the message flow to and from USB-Stick.""" + + def __init__(self) -> None: + """Initialize Stick controller.""" + self._sender: StickSender | None = None + self._receiver: 
StickReceiver | None = None
+        self._serial_transport: SerialTransport | None = None
+        self._port = ""
+        self._connected: bool = False
+        self._stick_event_subscribers: dict[
+            Callable[[], None],
+            tuple[Callable[[StickEvent], Awaitable[None]], tuple[StickEvent, ...]],
+        ] = {}
+        self._unsubscribe_stick_events: Callable[[], None] | None = None
+
+    @property
+    def queue_depth(self) -> int:
+        """Return the number of sent requests not yet processed by the receiver."""
+        return self._sender.processed_messages - self._receiver.processed_messages
+
+    def correct_received_messages(self, correction: int) -> None:
+        """Correct the receiver's processed-message counter."""
+        self._receiver.correct_processed_messages(correction)
+
+    @property
+    def serial_path(self) -> str:
+        """Return current port."""
+        return self._port
+
+    @property
+    def is_connected(self) -> bool:
+        """Returns True if USB-Stick connection is active."""
+        if not self._connected:
+            return False
+        if self._receiver is None:
+            return False
+        return self._receiver.is_connected
+
+    def _subscribe_to_stick_events(self) -> None:
+        """Subscribe to handle stick events by manager."""
+        if not self.is_connected or self._receiver is None:
+            raise StickError("Unable to subscribe to events")
+        if self._unsubscribe_stick_events is None:
+            self._unsubscribe_stick_events = self._receiver.subscribe_to_stick_events(
+                self._handle_stick_event,
+                (StickEvent.CONNECTED, StickEvent.DISCONNECTED),
+            )
+
+    async def _handle_stick_event(
+        self,
+        event: StickEvent,
+    ) -> None:
+        """Call callback for stick event subscribers."""
+        if len(self._stick_event_subscribers) == 0:
+            return
+        callback_list: list[Awaitable[None]] = []
+        for callback, stick_events in self._stick_event_subscribers.values():
+            if event in stick_events:
+                callback_list.append(callback(event))
+        if len(callback_list) > 0:
+            await gather(*callback_list)
+
+    def subscribe_to_stick_events(
+        self,
+        stick_event_callback: Callable[[StickEvent], Awaitable[None]],
+        events: tuple[StickEvent, ...],
+    ) -> Callable[[], None]:
+        """Subscribe callback when specified StickEvent occurs.
+
+        Returns the function to be called to unsubscribe later.
+        """
+
+        def remove_subscription() -> None:
+            """Remove stick event subscription."""
+            self._stick_event_subscribers.pop(remove_subscription)
+
+        self._stick_event_subscribers[remove_subscription] = (
+            stick_event_callback,
+            events,
+        )
+        return remove_subscription
+
+    async def subscribe_to_messages(
+        self,
+        node_response_callback: Callable[[PlugwiseResponse], Coroutine[Any, Any, bool]],
+        mac: bytes | None = None,
+        message_ids: tuple[bytes] | None = None,
+        seq_id: bytes | None = None,
+    ) -> Callable[[], None]:
+        """Subscribe an awaitable callback to be called when a specific message is received.
+
+        Returns the function to unsubscribe.
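+
+        Example:
+            Minimal sketch; ``manager``, ``handle_response`` and the MAC value
+            are illustrative placeholders, not part of this module::
+
+                async def handle_response(response: PlugwiseResponse) -> bool:
+                    print(response)
+                    return True
+
+                unsubscribe = await manager.subscribe_to_messages(
+                    handle_response, mac=b"000D6F0000123456"
+                )
+                # Later, stop receiving callbacks
+                unsubscribe()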
+ """ + if self._receiver is None or not self._receiver.is_connected: + raise StickError( + "Unable to subscribe to node response when receiver " + "is not loaded" + ) + return await self._receiver.subscribe_to_node_responses( + node_response_callback, mac, message_ids, seq_id + ) + + async def setup_connection_to_stick(self, serial_path: str) -> None: + """Create serial connection to USB-stick.""" + if self._connected: + raise StickError("Cannot setup connection, already connected") + loop = get_event_loop() + connected_future: Future[bool] = Future() + self._receiver = StickReceiver(connected_future) + self._port = serial_path + + try: + ( + self._serial_transport, + self._receiver, + ) = await wait_for( + create_serial_connection( + loop, + lambda: self._receiver, + url=serial_path, + baudrate=115200, + bytesize=EIGHTBITS, + stopbits=STOPBITS_ONE, + parity=PARITY_NONE, + xonxoff=False, + ), + timeout=5, + ) + except SerialException as err: + raise StickError( + f"Failed to open serial connection to {serial_path}" + ) from err + except TimeoutError as err: + raise StickError( + f"Failed to open serial connection to {serial_path}" + ) from err + + if self._receiver is None: + raise StickError("Protocol is not loaded") + self._sender = StickSender(self._receiver, self._serial_transport) + await self._sender.start() + await connected_future + if connected_future.result(): + await self._handle_stick_event(StickEvent.CONNECTED) + self._connected = True + self._subscribe_to_stick_events() + + async def write_to_stick(self, request: PlugwiseRequest) -> None: + """Write message to USB stick.""" + _LOGGER.debug("Write to USB-stick: %s", request) + if not request.resend: + raise StickError( + f"Failed to send {request.__class__.__name__} " + + f"to node {request.mac_decoded}, maximum number " + + f"of retries ({request.max_retries}) has been reached" + ) + if self._sender is None: + raise StickError( + f"Failed to send {request.__class__.__name__}" + + "because USB-Stick connection is not setup" + ) + await self._sender.write_request_to_port(request) + + async def disconnect_from_stick(self) -> None: + """Disconnect from USB-Stick.""" + _LOGGER.debug("Disconnecting manager") + if self._unsubscribe_stick_events is not None: + self._unsubscribe_stick_events() + self._unsubscribe_stick_events = None + self._connected = False + if self._sender is not None: + self._sender.stop() + if self._receiver is not None: + await self._receiver.close() + self._receiver = None + _LOGGER.debug("Manager disconnected") diff --git a/plugwise_usb/connection/py.typed b/plugwise_usb/connection/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/plugwise_usb/connection/queue.py b/plugwise_usb/connection/queue.py new file mode 100644 index 000000000..4aa075a9f --- /dev/null +++ b/plugwise_usb/connection/queue.py @@ -0,0 +1,149 @@ +"""Manage the communication sessions towards the USB-Stick.""" + +from __future__ import annotations + +from asyncio import PriorityQueue, Task, get_running_loop, sleep +from collections.abc import Callable +from dataclasses import dataclass +import logging + +from ..api import StickEvent +from ..exceptions import MessageError, NodeTimeout, StickError, StickTimeout +from ..messages import Priority +from ..messages.requests import NodePingRequest, PlugwiseCancelRequest, PlugwiseRequest +from ..messages.responses import PlugwiseResponse +from .manager import StickConnectionManager + +_LOGGER = logging.getLogger(__name__) + + +@dataclass +class RequestState: + """Node hardware 
information.""" + + session: bytes + zigbee_address: int + + +class StickQueue: + """Manage queue of all request sessions.""" + + def __init__(self) -> None: + """Initialize the message session controller.""" + self._stick: StickConnectionManager | None = None + self._loop = get_running_loop() + self._submit_queue: PriorityQueue[PlugwiseRequest] = PriorityQueue() + self._submit_worker_task: Task[None] | None = None + self._unsubscribe_connection_events: Callable[[], None] | None = None + self._running = False + + @property + def is_running(self) -> bool: + """Return the state of the queue.""" + return self._running + + def start(self, stick_connection_manager: StickConnectionManager) -> None: + """Start sending request from queue.""" + if self._running: + raise StickError("Cannot start queue manager, already running") + self._stick = stick_connection_manager + if self._stick.is_connected: + self._running = True + self._unsubscribe_connection_events = self._stick.subscribe_to_stick_events( + self._handle_stick_event, (StickEvent.CONNECTED, StickEvent.DISCONNECTED) + ) + + async def _handle_stick_event(self, event: StickEvent) -> None: + """Handle events from stick.""" + if event is StickEvent.CONNECTED: + self._running = True + elif event is StickEvent.DISCONNECTED: + self._running = False + + async def stop(self) -> None: + """Stop sending from queue.""" + _LOGGER.debug("Stop queue") + if self._unsubscribe_connection_events is not None: + self._unsubscribe_connection_events() + self._running = False + if self._submit_worker_task is not None and not self._submit_worker_task.done(): + cancel_request = PlugwiseCancelRequest() + await self._submit_queue.put(cancel_request) + await self._submit_worker_task + self._submit_worker_task = None + self._stick = None + _LOGGER.debug("queue stopped") + + async def submit(self, request: PlugwiseRequest) -> PlugwiseResponse | None: + """Add request to queue and return the response of node. Raises an error when something fails.""" + if request.waiting_for_response: + raise MessageError( + f"Cannot send message {request} which is currently waiting for response." 
+ ) + + while request.resend and not request.waiting_for_response: + _LOGGER.debug("submit | start (%s) %s", request.retries_left, request) + if not self._running or self._stick is None: + raise StickError( + f"Cannot send message {request.__class__.__name__} for" + + f"{request.mac_decoded} because queue manager is stopped" + ) + await self._add_request_to_queue(request) + try: + response: PlugwiseResponse = await request.response_future() + return response + except (NodeTimeout, StickTimeout) as e: + if isinstance(request, NodePingRequest): + # For ping requests it is expected to receive timeouts, so lower log level + _LOGGER.debug( + "%s, cancel because timeout is expected for NodePingRequests", e + ) + elif request.resend: + _LOGGER.debug("%s, retrying", e) + else: + _LOGGER.warning("%s, cancel request", e) # type: ignore[unreachable] + except StickError as exception: + _LOGGER.error(exception) + self._stick.correct_received_messages(1) + raise StickError( + f"No response received for {request.__class__.__name__} " + + f"to {request.mac_decoded}" + ) from exception + except BaseException as exception: + self._stick.correct_received_messages(1) + raise StickError( + f"No response received for {request.__class__.__name__} " + + f"to {request.mac_decoded}" + ) from exception + + return None + + async def _add_request_to_queue(self, request: PlugwiseRequest) -> None: + """Add request to send queue.""" + _LOGGER.debug("Add request to queue: %s", request) + await self._submit_queue.put(request) + if self._submit_worker_task is None or self._submit_worker_task.done(): + self._submit_worker_task = self._loop.create_task( + self._send_queue_worker(), name="Send queue worker" + ) + + async def _send_queue_worker(self) -> None: + """Send messages from queue at the order of priority.""" + _LOGGER.debug("Send_queue_worker started") + while self._running and self._stick is not None: + request = await self._submit_queue.get() + _LOGGER.debug("Send from send queue %s", request) + if request.priority == Priority.CANCEL: + self._submit_queue.task_done() + return + + if self._stick.queue_depth > 3: + await sleep(0.125) + if self._stick.queue_depth > 3: + _LOGGER.warning("Awaiting plugwise responses %d", self._stick.queue_depth) + + await self._stick.write_to_stick(request) + self._submit_queue.task_done() + + _LOGGER.debug("Sent from queue %s", request) + _LOGGER.debug("Send_queue_worker stopped") diff --git a/plugwise_usb/connection/receiver.py b/plugwise_usb/connection/receiver.py new file mode 100644 index 000000000..7ea263038 --- /dev/null +++ b/plugwise_usb/connection/receiver.py @@ -0,0 +1,515 @@ +"""Receive data from USB-Stick. + +Process incoming data stream from the Plugwise USB-Stick and +convert it into response messages. + +Responsible to + + 1. Collect and buffer raw data received from Stick: data_received() + 2. Convert raw data into response message: parse_data() + 3. Forward response message to the message subscribers + +and publish detected connection status changes + + 1. 
Notify status subscribers to connection state changes + +""" + +from __future__ import annotations + +from asyncio import ( + Future, + Lock, + PriorityQueue, + Protocol, + Queue, + Task, + gather, + get_running_loop, + sleep, +) +from collections.abc import Awaitable, Callable, Coroutine +from dataclasses import dataclass +import logging +from typing import Any, Final + +from serial_asyncio_fast import SerialTransport + +from ..api import StickEvent +from ..constants import MESSAGE_FOOTER, MESSAGE_HEADER +from ..exceptions import MessageError +from ..messages import Priority +from ..messages.responses import ( + BROADCAST_IDS, + PlugwiseResponse, + StickResponse, + StickResponseType, + get_message_object, +) + +_LOGGER = logging.getLogger(__name__) +STICK_RECEIVER_EVENTS = (StickEvent.CONNECTED, StickEvent.DISCONNECTED) +CACHED_REQUESTS: Final = 50 + + +@dataclass +class StickEventSubscription: + """Subscription registration details for stick responses.""" + + callback_fn: Callable[[StickEvent], Coroutine[Any, Any, None]] + stick_events: tuple[StickEvent, ...] + + +@dataclass +class StickResponseSubscription: + """Subscription registration details for stick responses.""" + + callback_fn: Callable[[StickResponse], Coroutine[Any, Any, None]] + seq_id: bytes | None + stick_response_type: tuple[StickResponseType, ...] | None + + +@dataclass +class NodeResponseSubscription: + """Subscription registration details for node responses.""" + + callback_fn: Callable[[PlugwiseResponse], Coroutine[Any, Any, bool]] + mac: bytes | None + response_ids: tuple[bytes, ...] | None + seq_id: bytes | None + + +class StickReceiver(Protocol): + """Receive data from USB Stick connection and convert it into response messages.""" + + def __init__( + self, + connected_future: Future[bool] | None = None, + ) -> None: + """Initialize instance of the USB Stick connection.""" + super().__init__() + self._loop = get_running_loop() + self._connected_future = connected_future + self._transport: SerialTransport | None = None + self._connection_state = False + + # Data processing + self._buffer: bytes = bytes([]) + self._data_queue: Queue[bytes] = Queue() + self._data_worker_task: Task[None] | None = None + + # Message processing + self._processed_msgs = 0 + self._message_queue: PriorityQueue[PlugwiseResponse] = PriorityQueue() + self._last_processed_messages: list[bytes] = [] + self._current_seq_id: bytes | None = None + self._responses: dict[bytes, Callable[[PlugwiseResponse], None]] = {} + self._message_worker_task: Task[None] | None = None + self._delayed_processing_tasks: dict[bytes, Task[None]] = {} + + # Subscribers + self._stick_subscription_lock = Lock() + self._node_subscription_lock = Lock() + + self._stick_event_subscribers: dict[ + Callable[[], None], StickEventSubscription + ] = {} + self._stick_subscribers_for_requests: dict[ + Callable[[], None], StickResponseSubscription + ] = {} + self._stick_subscribers_for_responses: dict[ + Callable[[], None], StickResponseSubscription + ] = {} + + self._node_response_subscribers: dict[ + Callable[[], None], NodeResponseSubscription + ] = {} + + def connection_lost(self, exc: Exception | None = None) -> None: + """Call when port was closed expectedly or unexpectedly.""" + _LOGGER.warning("Connection lost") + if exc is not None: + _LOGGER.warning("Connection to Plugwise USB-stick lost %s", exc) + self._loop.create_task(self.close()) + if len(self._stick_event_subscribers) > 0: + self._loop.create_task( + self._notify_stick_event_subscribers(StickEvent.DISCONNECTED) + ) + 
self._transport = None + self._connection_state = False + + @property + def processed_messages(self) -> int: + """Return the number of processed messages.""" + return self._processed_msgs + + @property + def is_connected(self) -> bool: + """Return current connection state of the USB-Stick.""" + return self._connection_state + + def correct_processed_messages(self, correction: int) -> None: + """Return the number of processed messages.""" + self._processed_msgs += correction + + def connection_made(self, transport: SerialTransport) -> None: + """Call when the serial connection to USB-Stick is established.""" + _LOGGER.info("Connection made") + self._transport = transport + if self._connected_future is not None and not self._connected_future.done(): + self._connected_future.set_result(True) + self._connection_state = True + if len(self._stick_event_subscribers) > 0: + self._loop.create_task( + self._notify_stick_event_subscribers(StickEvent.CONNECTED) + ) + + async def close(self) -> None: + """Close connection.""" + await self._stop_running_tasks() + if self._transport: + self._transport.close() + + async def _stop_running_tasks(self) -> None: + """Cancel and stop any running task.""" + for task in self._delayed_processing_tasks.values(): + task.cancel() + if ( + self._message_worker_task is not None + and not self._message_worker_task.done() + ): + cancel_response = StickResponse() + cancel_response.priority = Priority.CANCEL + await self._message_queue.put(cancel_response) + await self._message_worker_task + self._message_worker_task = None + + if self._data_worker_task is not None and not self._data_worker_task.done(): + await self._data_queue.put(b"FFFFFFFF") + await self._data_worker_task + self._data_worker_task = None + + # region Process incoming data + + def data_received(self, data: bytes) -> None: + """Receive data from USB-Stick connection. + + This function is called by inherited asyncio.Protocol class + """ + _LOGGER.debug("Received data from USB-Stick: %s", data) + self._buffer += data + if MESSAGE_FOOTER in self._buffer: + data_of_messages = self._buffer.split(MESSAGE_FOOTER) + for msg_data in data_of_messages[:-1]: + # Ignore ASCII messages without a header and footer like: + # # SENDING PING UNICAST: Macid: ???????????????? + # # HANDLE: 0x?? 
+ # # APSRequestNodeInfo + if (header_index := msg_data.find(MESSAGE_HEADER)) != -1: + data = msg_data[header_index:] + self._put_data_in_queue(data) + if len(data_of_messages) > 4: + _LOGGER.debug( + "Reading %d messages at once from USB-Stick", len(data_of_messages) + ) + self._buffer = data_of_messages[-1] # whatever was left over + + def _put_data_in_queue(self, data: bytes) -> None: + """Put raw message data in queue to be converted to messages.""" + self._data_queue.put_nowait(data) + if self._data_worker_task is None or self._data_worker_task.done(): + self._data_worker_task = self._loop.create_task( + self._data_queue_worker(), name="Plugwise data receiver queue worker" + ) + + async def _data_queue_worker(self) -> None: + """Convert collected data into messages and place then im message queue.""" + _LOGGER.debug("Data queue worker started") + while self.is_connected: + if (data := await self._data_queue.get()) != b"FFFFFFFF": + if (response := self.extract_message_from_data(data)) is not None: + await self._put_message_in_queue(response) + self._data_queue.task_done() + else: + self._data_queue.task_done() + return + await sleep(0) + _LOGGER.debug("Data queue worker stopped") + + def extract_message_from_data(self, msg_data: bytes) -> PlugwiseResponse | None: + """Extract message from buffer.""" + identifier = msg_data[4:8] + seq_id = msg_data[8:12] + msg_data_length = len(msg_data) + if ( + response := get_message_object(identifier, msg_data_length, seq_id) + ) is None: + _raw_msg_data_data = msg_data[2:][: msg_data_length - 4] + _LOGGER.warning("Drop unknown message type %s", str(_raw_msg_data_data)) + return None + + # Populate response message object with data + try: + response.deserialize(msg_data, has_footer=False) + except MessageError as err: + _LOGGER.warning(err) + return None + + _LOGGER.debug("Data %s converted into %s", msg_data, response) + return response + + # endregion + + # region Process incoming messages + + async def _put_message_in_queue( + self, response: PlugwiseResponse, delay: float = 0.0 + ) -> None: + """Put message in queue to be processed.""" + if delay > 0.0: + await sleep(delay) + _LOGGER.debug("Add response to queue: %s", response) + await self._message_queue.put(response) + if self._message_worker_task is None or self._message_worker_task.done(): + self._message_worker_task = self._loop.create_task( + self._message_queue_worker(), + name="Plugwise message receiver queue worker", + ) + + async def _message_queue_worker(self) -> None: + """Process messages in receiver queue.""" + _LOGGER.debug("Message queue worker started") + while self.is_connected: + response: PlugwiseResponse = await self._message_queue.get() + if response.priority == Priority.CANCEL: + self._message_queue.task_done() + return + _LOGGER.debug("Message queue worker queue: %s", response) + if isinstance(response, StickResponse): + await self._notify_stick_subscribers(response) + else: + await self._notify_node_response_subscribers(response) + self._processed_msgs += 1 + self._message_queue.task_done() + await sleep(0) + _LOGGER.debug("Message queue worker stopped") + + # endregion + + # region Stick + + def subscribe_to_stick_events( + self, + stick_event_callback: Callable[[StickEvent], Coroutine[Any, Any, None]], + events: tuple[StickEvent, ...], + ) -> Callable[[], None]: + """Subscribe callback when specified StickEvent occurs. + + Returns the function to be called to unsubscribe later. 
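+
+        Example:
+            Illustrative sketch only; ``receiver`` and ``on_stick_event`` are
+            placeholder names for a StickReceiver instance and an async callback::
+
+                async def on_stick_event(event: StickEvent) -> None:
+                    print(f"Stick event: {event}")
+
+                unsubscribe = receiver.subscribe_to_stick_events(
+                    on_stick_event, (StickEvent.CONNECTED, StickEvent.DISCONNECTED)
+                )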
+ """ + + def remove_subscription() -> None: + """Remove stick event subscription.""" + self._stick_event_subscribers.pop(remove_subscription) + + self._stick_event_subscribers[remove_subscription] = StickEventSubscription( + stick_event_callback, events + ) + return remove_subscription + + async def _notify_stick_event_subscribers( + self, + event: StickEvent, + ) -> None: + """Call callback for stick event subscribers.""" + callback_list: list[Awaitable[None]] = [] + for subscription in self._stick_event_subscribers.values(): + if event in subscription.stick_events: + callback_list.append(subscription.callback_fn(event)) + if len(callback_list) > 0: + await gather(*callback_list) + + async def subscribe_to_stick_responses( + self, + callback: Callable[[StickResponse], Coroutine[Any, Any, None]], + seq_id: bytes | None = None, + response_type: tuple[StickResponseType, ...] | None = None, + ) -> Callable[[], None]: + """Subscribe to response messages from stick.""" + + def remove_subscription_for_requests() -> None: + """Remove update listener.""" + self._stick_subscribers_for_requests.pop(remove_subscription_for_requests) + + def remove_subscription_for_responses() -> None: + """Remove update listener.""" + self._stick_subscribers_for_responses.pop(remove_subscription_for_responses) + + if seq_id is None: + await self._stick_subscription_lock.acquire() + self._stick_subscribers_for_requests[remove_subscription_for_requests] = ( + StickResponseSubscription(callback, seq_id, response_type) + ) + self._stick_subscription_lock.release() + return remove_subscription_for_requests + + self._stick_subscribers_for_responses[remove_subscription_for_responses] = ( + StickResponseSubscription(callback, seq_id, response_type) + ) + return remove_subscription_for_responses + + async def _notify_stick_subscribers(self, stick_response: StickResponse) -> None: + """Call callback for all stick response message subscribers.""" + await self._stick_subscription_lock.acquire() + for subscription in self._stick_subscribers_for_requests.values(): + if ( + subscription.seq_id is not None + and subscription.seq_id != stick_response.seq_id + ): + continue + if ( + subscription.stick_response_type is not None + and stick_response.response_type not in subscription.stick_response_type + ): + continue + _LOGGER.debug("Notify stick request subscriber for %s", stick_response) + await subscription.callback_fn(stick_response) + self._stick_subscription_lock.release() + + for subscription in list(self._stick_subscribers_for_responses.values()): + if ( + subscription.seq_id is not None + and subscription.seq_id != stick_response.seq_id + ): + continue + if ( + subscription.stick_response_type is not None + and stick_response.response_type not in subscription.stick_response_type + ): + continue + _LOGGER.debug("Notify stick response subscriber for %s", stick_response) + await subscription.callback_fn(stick_response) + _LOGGER.debug( + "Finished Notify stick response subscriber for %s", stick_response + ) + + # endregion + # region node + + async def subscribe_to_node_responses( + self, + node_response_callback: Callable[[PlugwiseResponse], Coroutine[Any, Any, bool]], + mac: bytes | None = None, + message_ids: tuple[bytes, ...] | None = None, + seq_id: bytes | None = None, + ) -> Callable[[], None]: + """Subscribe a awaitable callback to be called when a specific message is received. + + Returns function to unsubscribe. 
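+
+        Example:
+            Minimal sketch; ``receiver``, ``match_seq_id`` and the sequence id
+            value are illustrative placeholders::
+
+                async def match_seq_id(response: PlugwiseResponse) -> bool:
+                    return True
+
+                unsubscribe = await receiver.subscribe_to_node_responses(
+                    match_seq_id, seq_id=b"1234"
+                )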
+ """ + await self._node_subscription_lock.acquire() + + def remove_listener() -> None: + """Remove update listener.""" + _LOGGER.debug( + "Node response subscriber removed: mac=%s, msg_idS=%s, seq_id=%s", + mac, + message_ids, + seq_id, + ) + self._node_response_subscribers.pop(remove_listener) + + self._node_response_subscribers[remove_listener] = NodeResponseSubscription( + callback_fn=node_response_callback, + mac=mac, + response_ids=message_ids, + seq_id=seq_id, + ) + self._node_subscription_lock.release() + _LOGGER.debug( + "Node response subscriber added: mac=%s, msg_idS=%s, seq_id=%s", + mac, + message_ids, + seq_id, + ) + _LOGGER.debug("node subscription created for %s - %s", mac, seq_id) + return remove_listener + + async def _notify_node_response_subscribers( + self, node_response: PlugwiseResponse + ) -> None: + """Call callback for all node response message subscribers.""" + if node_response.seq_id is None: + return + + if node_response.seq_id in self._last_processed_messages: + _LOGGER.debug("Drop previously processed duplicate %s", node_response) + return + + await self._node_subscription_lock.acquire() + + notify_tasks: list[Coroutine[Any, Any, bool]] = [] + for node_subscription in self._node_response_subscribers.values(): + if ( + node_subscription.mac is not None + and node_subscription.mac != node_response.mac + ): + continue + if ( + node_subscription.response_ids is not None + and node_response.identifier not in node_subscription.response_ids + ): + continue + if ( + node_subscription.seq_id is not None + and node_subscription.seq_id != node_response.seq_id + ): + continue + notify_tasks.append(node_subscription.callback_fn(node_response)) + + self._node_subscription_lock.release() + if len(notify_tasks) > 0: + _LOGGER.info("Received %s %s", node_response, node_response.seq_id) + if node_response.seq_id not in BROADCAST_IDS: + self._last_processed_messages.append(node_response.seq_id) + # Limit tracking to only the last appended request (FIFO) + self._last_processed_messages = self._last_processed_messages[ + -CACHED_REQUESTS: + ] + + # Cleanup pending task + if node_response.seq_id in self._delayed_processing_tasks: + del self._delayed_processing_tasks[node_response.seq_id] + + # execute callbacks + _LOGGER.debug( + "Notify node response subscribers (%s) about %s", + len(notify_tasks), + node_response, + ) + task_result = await gather(*notify_tasks) + + # Log execution result for special cases + if not all(task_result): + _LOGGER.warning( + "Executed %s tasks (result=%s) for %s", + len(notify_tasks), + task_result, + node_response, + ) + return + + if node_response.retries > 10: + _LOGGER.warning( + "No subscriber to handle %s after 10 retries", + node_response, + ) + return + node_response.retries += 1 + self._delayed_processing_tasks[node_response.seq_id] = self._loop.create_task( + self._put_message_in_queue(node_response, 0.1 * node_response.retries), + name=f"Postpone subscription task for {node_response.seq_id!r} retry {node_response.retries}", + ) + + +# endregion diff --git a/plugwise_usb/connection/sender.py b/plugwise_usb/connection/sender.py new file mode 100644 index 000000000..007103a25 --- /dev/null +++ b/plugwise_usb/connection/sender.py @@ -0,0 +1,155 @@ +"""Send data to USB-Stick. + +Serialize request message and pass data stream to legacy Plugwise USB-Stick +Wait for stick to respond. +When request is accepted by USB-Stick, return the Sequence ID of the session. + +process flow + +1. Send(request) +1. wait for lock +1. 
convert (serialize) request message into bytes +1. send data to serial port +1. wait for stick reply (accept, timeout, failed) +1. when accept, return sequence id for response message of node + +""" + +from __future__ import annotations + +from asyncio import Future, Lock, Transport, get_running_loop, timeout +from collections.abc import Callable +import logging + +from ..constants import STICK_TIME_OUT +from ..exceptions import StickError +from ..messages.requests import PlugwiseRequest +from ..messages.responses import StickResponse, StickResponseType +from .receiver import StickReceiver + +_LOGGER = logging.getLogger(__name__) + + +class StickSender: + """Send request messages though USB Stick transport connection.""" + + def __init__(self, stick_receiver: StickReceiver, transport: Transport) -> None: + """Initialize the Stick Sender class.""" + self._loop = get_running_loop() + self._receiver = stick_receiver + self._transport = transport + self._processed_msgs = 0 + self._stick_response: Future[StickResponse] | None = None + self._stick_lock = Lock() + self._current_request: None | PlugwiseRequest = None + self._unsubscribe_stick_response: Callable[[], None] | None = None + + @property + def processed_messages(self) -> int: + """Return the number of processed messages.""" + return self._processed_msgs + + async def start(self) -> None: + """Start the sender.""" + # Subscribe to ACCEPT stick responses, which contain the seq_id we need. + # Other stick responses are not related to this request. + self._unsubscribe_stick_response = ( + await self._receiver.subscribe_to_stick_responses( + self._process_stick_response, + None, + ( + StickResponseType.ACCEPT, + StickResponseType.TIMEOUT, + StickResponseType.FAILED, + ), + ) + ) + + async def write_request_to_port(self, request: PlugwiseRequest) -> None: + """Send message to serial port of USB stick.""" + if self._transport is None: + raise StickError("USB-Stick transport missing.") + + await self._stick_lock.acquire() + self._current_request = request + self._stick_response = self._loop.create_future() + + request.add_send_attempt() + _LOGGER.info("Send %s", request) + + # Write message to serial port buffer + serialized_data = request.serialize() + _LOGGER.debug("write_request_to_port | Write %s to port as %s", request, serialized_data) + self._transport.write(serialized_data) + request.start_response_timeout() + + # Wait for USB stick to accept request + try: + async with timeout(STICK_TIME_OUT): + response: StickResponse = await self._stick_response + except TimeoutError: + _LOGGER.warning( + "USB-Stick did not respond within %s seconds after writing %s", + STICK_TIME_OUT, + request, + ) + request.assign_error( + BaseException( + StickError( + f"USB-Stick did not respond within {STICK_TIME_OUT} seconds after writing {request}" + ) + ) + ) + except BaseException as exc: # pylint: disable=broad-exception-caught + _LOGGER.warning("Exception for %s: %s", request, exc) + request.assign_error(exc) + else: + _LOGGER.debug("write_request_to_port | USB-Stick replied with %s to request %s", response, request) + if response.response_type == StickResponseType.ACCEPT: + if request.seq_id is not None: + request.assign_error( + BaseException( + StickError(f"USB-Stick failed communication for {request}") + ) + ) + else: + request.seq_id = response.seq_id + await request.subscribe_to_response( + self._receiver.subscribe_to_stick_responses, + self._receiver.subscribe_to_node_responses, + ) + _LOGGER.debug("write_request_to_port | request has subscribed : 
%s", request) + elif response.response_type == StickResponseType.TIMEOUT: + _LOGGER.warning( + "USB-Stick directly responded with communication timeout for %s", + request, + ) + request.assign_error( + BaseException( + StickError(f"USB-Stick responded with timeout for {request}") + ) + ) + elif response.response_type == StickResponseType.FAILED: + _LOGGER.warning("USB-Stick failed communication for %s", request) + request.assign_error( + BaseException( + StickError(f"USB-Stick failed communication for {request}") + ) + ) + finally: + self._stick_response.cancel() + self._stick_lock.release() + self._processed_msgs += 1 + + async def _process_stick_response(self, response: StickResponse) -> None: + """Process stick response.""" + if self._stick_response is None or self._stick_response.done(): + _LOGGER.warning("No open request for %s", str(response)) + return + _LOGGER.debug("Received %s as reply to %s", response, self._current_request) + self._stick_response.set_result(response) + + def stop(self) -> None: + """Stop sender.""" + if self._unsubscribe_stick_response is not None: + self._unsubscribe_stick_response() diff --git a/plugwise_usb/connections/__init__.py b/plugwise_usb/connections/__init__.py deleted file mode 100644 index cd63debe9..000000000 --- a/plugwise_usb/connections/__init__.py +++ /dev/null @@ -1,136 +0,0 @@ -"""Base class for serial or socket connections to USB-Stick.""" -import logging -import queue -import threading -import time - -from ..constants import SLEEP_TIME -from ..messages.requests import NodeRequest - -_LOGGER = logging.getLogger(__name__) - - -class StickConnection: - """Generic Plugwise stick connection.""" - - def __init__(self, port, parser): - """Initialize StickConnection.""" - self.port = port - self.parser = parser - self.run_reader_thread = False - self.run_writer_thread = False - self._is_connected = False - self._writer = None - - self._reader_thread = None - self._write_queue = None - self._writer_thread = None - - ################################################ - # Open connection # - ################################################ - - def connect(self) -> bool: - """Open the connection.""" - if not self._is_connected: - self._open_connection() - return self._is_connected - - def _open_connection(self): - """Placeholder.""" - - ################################################ - # Reader # - ################################################ - - def _reader_start(self, name): - """Start the reader thread to receive data.""" - self._reader_thread = threading.Thread(None, self._reader_deamon, name, (), {}) - self.run_reader_thread = True - self._reader_thread.start() - - def _reader_deamon(self): - """Thread to collect available data from connection.""" - while self.run_reader_thread: - if data := self._read_data(): - self.parser(data) - time.sleep(0.01) - _LOGGER.debug("Reader daemon stopped") - - # TODO: 20220125 function instead of self - def _read_data(self): - """placeholder.""" - return b"0000" - - ################################################ - # Writer # - ################################################ - - def _writer_start(self, name: str): - """Start the writer thread to send data.""" - self._write_queue = queue.Queue() - self._writer_thread = threading.Thread(None, self._writer_daemon, name, (), {}) - self._writer_thread.daemon = True - self.run_writer_thread = True - self._writer_thread.start() - - def _writer_daemon(self): - """Thread to write data from queue to existing connection.""" - while self.run_writer_thread: - try: - 
(message, callback) = self._write_queue.get(block=True, timeout=1) - except queue.Empty: - time.sleep(SLEEP_TIME) - else: - _LOGGER.debug( - "Sending %s to plugwise stick (%s)", - message.__class__.__name__, - message.serialize(), - ) - self._write_data(message.serialize()) - time.sleep(SLEEP_TIME) - if callback: - callback() - _LOGGER.debug("Writer daemon stopped") - - def _write_data(self, data): - """Placeholder.""" - - def send(self, message: NodeRequest, callback=None): - """Add message to write queue.""" - self._write_queue.put_nowait((message, callback)) - - ################################################ - # Connection state # - ################################################ - - def is_connected(self): - """Return connection state.""" - return self._is_connected - - def read_thread_alive(self): - """Return state of write thread.""" - return self._reader_thread.is_alive() if self.run_reader_thread else False - - def write_thread_alive(self): - """Return state of write thread.""" - return self._writer_thread.is_alive() if self.run_writer_thread else False - - ################################################ - # Close connection # - ################################################ - - def disconnect(self): - """Close the connection.""" - if self._is_connected: - self._is_connected = False - self.run_writer_thread = False - self.run_reader_thread = False - max_wait = 5 * SLEEP_TIME - while self._writer_thread.is_alive(): - time.sleep(SLEEP_TIME) - max_wait -= SLEEP_TIME - self._close_connection() - - def _close_connection(self): - """Placeholder.""" diff --git a/plugwise_usb/connections/serial.py b/plugwise_usb/connections/serial.py deleted file mode 100644 index 082247627..000000000 --- a/plugwise_usb/connections/serial.py +++ /dev/null @@ -1,88 +0,0 @@ -"""Serial connection.""" -import logging - -import serial - -from ..connections import StickConnection -from ..constants import BAUD_RATE, BYTE_SIZE, STOPBITS -from ..exceptions import PortError - -_LOGGER = logging.getLogger(__name__) - - -class PlugwiseUSBConnection(StickConnection): - """Simple wrapper around serial module.""" - - def __init__(self, port, parser): - super().__init__(port, parser) - self._baud = BAUD_RATE - self._byte_size = BYTE_SIZE - self._stopbits = STOPBITS - self._parity = serial.PARITY_NONE - - self._serial = None - - def _open_connection(self): - """Open serial port.""" - _LOGGER.debug("Open serial port %s", self.port) - try: - self._serial = serial.Serial( - port=self.port, - baudrate=self._baud, - bytesize=self._byte_size, - parity=self._parity, - stopbits=self._stopbits, - timeout=1, - ) - except serial.serialutil.SerialException as err: - _LOGGER.debug( - "Failed to connect to serial port %s, %s", - self.port, - err, - ) - raise PortError(err) - self._is_connected = self._serial.isOpen() - if self._is_connected: - self._reader_start("serial_reader_thread") - self._writer_start("serial_writer_thread") - _LOGGER.debug("Successfully connected to serial port %s", self.port) - else: - _LOGGER.error( - "Failed to open serial port %s", - self.port, - ) - - def _close_connection(self): - """Close serial port.""" - try: - self._serial.close() - except serial.serialutil.SerialException as err: - _LOGGER.debug( - "Failed to close serial port %s, %s", - self.port, - err, - ) - raise PortError(err) - - def _read_data(self): - """Read thread.""" - if self._is_connected: - try: - serial_data = self._serial.read_all() - except serial.serialutil.SerialException as err: - _LOGGER.debug("Error while reading data from 
serial port : %s", err) - self._is_connected = False - raise PortError(err) - except Exception as err: # pylint: disable=broad-except - _LOGGER.debug("Error _read_data : %s", err) - return serial_data - return None - - def _write_data(self, data): - """Write data to serial port.""" - try: - self._serial.write(data) - except serial.serialutil.SerialException as err: - _LOGGER.debug("Error while writing data to serial port : %s", err) - self._is_connected = False - raise PortError(err) diff --git a/plugwise_usb/connections/socket.py b/plugwise_usb/connections/socket.py deleted file mode 100644 index 990d3e0c1..000000000 --- a/plugwise_usb/connections/socket.py +++ /dev/null @@ -1,88 +0,0 @@ -"""Socket connection.""" -import logging -import socket - -from ..connections import StickConnection -from ..exceptions import PortError - -_LOGGER = logging.getLogger(__name__) - - -class SocketConnection(StickConnection): - """Wrapper for Socket connection configuration.""" - - def __init__(self, port, parser): - super().__init__(port, parser) - # get the address from a : format - port_split = self.port.split(":") - self._socket_host = port_split[0] - self._socket_port = int(port_split[1]) - self._socket_address = (self._socket_host, self._socket_port) - - self._socket = None - - def _open_connection(self): - """Open socket.""" - _LOGGER.debug( - "Open socket to host '%s' at port %s", - self._socket_host, - str(self._socket_port), - ) - try: - self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - self._socket.connect(self._socket_address) - except Exception as err: - _LOGGER.debug( - "Failed to connect to host %s at port %s, %s", - self._socket_host, - str(self._socket_port), - err, - ) - raise PortError(err) - self._reader_start("socket_reader_thread") - self._writer_start("socket_writer_thread") - self._is_connected = True - _LOGGER.debug( - "Successfully connected to host '%s' at port %s", - self._socket_host, - str(self._socket_port), - ) - - def _close_connection(self): - """Close the socket.""" - try: - self._socket.close() - except Exception as err: - _LOGGER.debug( - "Failed to close socket to host %s at port %s, %s", - self._socket_host, - str(self._socket_port), - err, - ) - raise PortError(err) - - def _read_data(self): - """Read data from socket.""" - if self._is_connected: - try: - socket_data = self._socket.recv(9999) - except Exception as err: - _LOGGER.debug( - "Error while reading data from host %s at port %s : %s", - self._socket_host, - str(self._socket_port), - err, - ) - self._is_connected = False - raise PortError(err) - return socket_data - return None - - def _write_data(self, data): - """Write data to socket.""" - try: - self._socket.send(data) - except Exception as err: - _LOGGER.debug("Error while writing data to socket port : %s", err) - self._is_connected = False - raise PortError(err) diff --git a/plugwise_usb/constants.py b/plugwise_usb/constants.py index d19b156eb..df8442103 100644 --- a/plugwise_usb/constants.py +++ b/plugwise_usb/constants.py @@ -1,4 +1,4 @@ -"""Plugwise Stick (power_usb_ constants.""" +"""Plugwise Stick constants.""" from __future__ import annotations import datetime as dt @@ -7,203 +7,77 @@ LOGGER = logging.getLogger(__name__) -# Copied homeassistant.consts -ARBITRARY_DATE: Final = dt.datetime(2022, 5, 14) -ATTR_NAME: Final = "name" -ATTR_STATE: Final = "state" -ATTR_STATE_CLASS: Final = "state_class" -ATTR_UNIT_OF_MEASUREMENT: Final = "unit_of_measurement" -DEGREE: Final = "°" -ELECTRIC_POTENTIAL_VOLT: Final = "V" 
-ENERGY_KILO_WATT_HOUR: Final = "kWh" -ENERGY_WATT_HOUR: Final = "Wh" -PERCENTAGE: Final = "%" -POWER_WATT: Final = "W" -PRESET_AWAY: Final = "away" -PRESSURE_BAR: Final = "bar" -SIGNAL_STRENGTH_DECIBELS_MILLIWATT: Final = "dBm" -TEMP_CELSIUS: Final = "°C" -TEMP_KELVIN: Final = "°K" -TIME_MILLISECONDS: Final = "ms" -UNIT_LUMEN: Final = "lm" -VOLUME_CUBIC_METERS: Final = "m³" -VOLUME_CUBIC_METERS_PER_HOUR: Final = "m³/h" +# Cache folder name +CACHE_DIR: Final = ".plugwise-cache" +CACHE_KEY_SEPARATOR: str = ";" +CACHE_DATA_SEPARATOR: str = "|" -UTF8_DECODE: Final = "utf-8" -SPECIAL_FORMAT: Final[tuple[str, ...]] = (ENERGY_KILO_WATT_HOUR, VOLUME_CUBIC_METERS) +LOCAL_TIMEZONE = dt.datetime.now(dt.UTC).astimezone().tzinfo +UTF8: Final = "utf-8" -# Serial connection settings for plugwise USB stick -BAUD_RATE: Final = 115200 -BYTE_SIZE: Final = 8 -PARITY: Final = "N" -STOPBITS: Final = 1 +# Time +DAY_IN_HOURS: Final = 24 +WEEK_IN_HOURS: Final = 168 +DAY_IN_MINUTES: Final = 1440 +HOUR_IN_MINUTES: Final = 60 +DAY_IN_SECONDS: Final = 86400 +HOUR_IN_SECONDS: Final = 3600 +MINUTE_IN_SECONDS: Final = 60 +SECOND_IN_NANOSECONDS: Final = 1000000000 # Plugwise message identifiers MESSAGE_FOOTER: Final = b"\x0d\x0a" MESSAGE_HEADER: Final = b"\x05\x05\x03\x03" -MESSAGE_LARGE: Final = "LARGE" -MESSAGE_SMALL: Final = "SMALL" - -# Acknowledge message types - -# NodeAckSmallResponse -RESPONSE_TYPE_SUCCESS: Final = b"00C1" -RESPONSE_TYPE_ERROR: Final = b"00C2" -RESPONSE_TYPE_TIMEOUT: Final = b"00E1" - -# NodeAckLargeResponse -CLOCK_SET: Final = b"00D7" -JOIN_REQUEST_ACCEPTED: Final = b"00D9" -RELAY_SWITCHED_OFF: Final = b"00DE" -RELAY_SWITCHED_ON: Final = b"00D8" -RELAY_SWITCH_FAILED: Final = b"00E2" -SLEEP_SET: Final = b"00F6" -SLEEP_FAILED: Final = b"00F7" # TODO: Validate -REAL_TIME_CLOCK_ACCEPTED: Final = b"00DF" -REAL_TIME_CLOCK_FAILED: Final = b"00E7" - -# NodeAckResponse -SCAN_CONFIGURE_ACCEPTED: Final = b"00BE" -SCAN_CONFIGURE_FAILED: Final = b"00BF" -SCAN_LIGHT_CALIBRATION_ACCEPTED: Final = b"00BD" -SENSE_INTERVAL_ACCEPTED: Final = b"00B3" -SENSE_INTERVAL_FAILED: Final = b"00B4" -SENSE_BOUNDARIES_ACCEPTED: Final = b"00B5" -SENSE_BOUNDARIES_FAILED: Final = b"00B6" - -STATE_ACTIONS = ( - RELAY_SWITCHED_ON, - RELAY_SWITCHED_OFF, - SCAN_CONFIGURE_ACCEPTED, - SLEEP_SET, -) -REQUEST_SUCCESS = ( - CLOCK_SET, - JOIN_REQUEST_ACCEPTED, - REAL_TIME_CLOCK_ACCEPTED, - RELAY_SWITCHED_ON, - RELAY_SWITCHED_OFF, - SCAN_CONFIGURE_ACCEPTED, - SCAN_LIGHT_CALIBRATION_ACCEPTED, - SENSE_BOUNDARIES_ACCEPTED, - SENSE_INTERVAL_ACCEPTED, - SLEEP_SET, -) -REQUEST_FAILED = ( - REAL_TIME_CLOCK_FAILED, - RELAY_SWITCH_FAILED, - RESPONSE_TYPE_ERROR, - RESPONSE_TYPE_TIMEOUT, - SCAN_CONFIGURE_FAILED, - SENSE_BOUNDARIES_FAILED, - SENSE_INTERVAL_FAILED, - SLEEP_FAILED, -) -STATUS_RESPONSES: Final[dict[bytes, str]] = { - # NodeAckSmallResponse - RESPONSE_TYPE_SUCCESS: "success", - RESPONSE_TYPE_ERROR: "error", - RESPONSE_TYPE_TIMEOUT: "timeout", - # NodeAckLargeResponse - CLOCK_SET: "clock set", - JOIN_REQUEST_ACCEPTED: "join accepted", - REAL_TIME_CLOCK_ACCEPTED: "real time clock set", - REAL_TIME_CLOCK_FAILED: "real time clock failed", - RELAY_SWITCHED_ON: "relay on", - RELAY_SWITCHED_OFF: "relay off", - RELAY_SWITCH_FAILED: "relay switching failed", - SLEEP_SET: "sleep settings accepted", - SLEEP_FAILED: "sleep settings failed", - # NodeAckResponse - SCAN_CONFIGURE_ACCEPTED: "Scan settings accepted", - SCAN_CONFIGURE_FAILED: "Scan settings failed", - SENSE_INTERVAL_ACCEPTED: "Sense report interval accepted", - SENSE_INTERVAL_FAILED: 
"Sense report interval failed", - SENSE_BOUNDARIES_ACCEPTED: "Sense boundaries accepted", - SENSE_BOUNDARIES_FAILED: "Sense boundaries failed", - SCAN_LIGHT_CALIBRATION_ACCEPTED: "Scan light calibration accepted", -} - -# TODO: responses -ACK_POWER_CALIBRATION: Final = b"00DA" -ACK_CIRCLE_PLUS: Final = b"00DD" -ACK_POWER_LOG_INTERVAL_SET: Final = b"00F8" - -# SED Awake status ID -SED_AWAKE_MAINTENANCE: Final = 0 # SED awake for maintenance -SED_AWAKE_FIRST: Final = 1 # SED awake for the first time -SED_AWAKE_STARTUP: Final = ( - 2 # SED awake after restart, e.g. after reinserting a battery -) -SED_AWAKE_STATE: Final = 3 # SED awake to report state (Motion / Temperature / Humidity -SED_AWAKE_UNKNOWN: Final = 4 # TODO: Unknown -SED_AWAKE_BUTTON: Final = 5 # SED awake due to button press # Max timeout in seconds -MESSAGE_TIME_OUT: Final = 15 # Stick responds with timeout messages after 10 sec. -MESSAGE_RETRY: Final = 2 +STICK_TIME_OUT: Final = 11 # Stick responds with timeout messages within 10s. +NODE_TIME_OUT: Final = 15 # In bigger networks a response from a node could take up a while, so lets use 15 seconds. +MAX_RETRIES: Final = 3 +SUPPRESS_INITIALIZATION_WARNINGS: Final = 10 # Minutes to suppress (expected) communication warning messages after initialization # plugwise year information is offset from y2k PLUGWISE_EPOCH: Final = 2000 PULSES_PER_KW_SECOND: Final = 468.9385193 -LOGADDR_OFFSET: Final = 278528 - -# Default sleep between sending messages -SLEEP_TIME: Final = 150 / 1000 -# Message priority levels -PRIORITY_HIGH: Final = 1 -PRIORITY_LOW: Final = 3 -PRIORITY_MEDIUM: Final = 2 +# Energy log memory addresses +LOGADDR_OFFSET: Final = 278528 # = b"00044000" +LOGADDR_MAX: Final = 6016 # last address for energy log # Max seconds the internal clock of plugwise nodes # are allowed to drift in seconds MAX_TIME_DRIFT: Final = 5 -# Default sleep time in seconds for watchdog daemon -WATCHDOG_DEAMON: Final = 60 +# Duration updates of node states +NODE_CACHE: Final = dt.timedelta(seconds=5) -# Automatically accept new join requests -ACCEPT_JOIN_REQUESTS = False - -# Node types -NODE_TYPE_STICK: Final = 0 -NODE_TYPE_CIRCLE_PLUS: Final = 1 # AME_NC -NODE_TYPE_CIRCLE: Final = 2 # AME_NR -NODE_TYPE_SWITCH: Final = 3 # AME_SEDSwitch -NODE_TYPE_SENSE: Final = 5 # AME_SEDSense -NODE_TYPE_SCAN: Final = 6 # AME_SEDScan -NODE_TYPE_CELSIUS_SED: Final = 7 # AME_CelsiusSED -NODE_TYPE_CELSIUS_NR: Final = 8 # AME_CelsiusNR -NODE_TYPE_STEALTH: Final = 9 # AME_STEALTH_ZE -# 10 AME_MSPBOOTLOAD -# 11 AME_STAR +# Minimal time between power updates in seconds +MINIMAL_POWER_UPDATE: Final = 5 # Hardware models based HW_MODELS: Final[dict[str, str]] = { "038500": "Stick", "070085": "Stick", "120002": "Stick Legrand", - "120041": "Circle+ Legrand type E", - "120000": "Circle+ Legrand type F", - "090000": "Circle+ type B", - "090007": "Circle+ type B", - "090088": "Circle+ type E", - "070073": "Circle+ type F", - "090048": "Circle+ type G", + "120041": "Circle + Legrand type E", + "120000": "Circle + Legrand type F", + "090000": "Circle + type B", + "090007": "Circle + type B", + "090088": "Circle + type E", + "070073": "Circle + type F", + "090048": "Circle + type G", + "090188": "Stealth +", "120049": "Stealth M+", - "090188": "Stealth+", + "120029": "Stealth Legrand", + "100025": "Circle", "120040": "Circle Legrand type E", "120001": "Circle Legrand type F", "090079": "Circle type B", "090087": "Circle type E", "070140": "Circle type F", "090093": "Circle type G", - "100025": "Circle", - "120048": "Stealth M", - 
"120029": "Stealth Legrand", "090011": "Stealth", "001200": "Stealth", + "120048": "Stealth M", "080007": "Scan", "110028": "Scan Legrand", "070030": "Sense", @@ -211,145 +85,3 @@ "070051": "Switch", "080029": "Switch", } - -# Defaults for SED's (Sleeping End Devices) -SED_STAY_ACTIVE: Final = 10 # Time in seconds the SED keep itself awake to receive and respond to other messages -SED_SLEEP_FOR: Final = 60 # Time in minutes the SED will sleep -SED_MAINTENANCE_INTERVAL: Final = 1440 # 24 hours, Interval in minutes the SED will get awake and notify it's available for maintenance purposes -SED_CLOCK_SYNC = True # Enable or disable synchronizing clock -SED_CLOCK_INTERVAL: Final = ( - 25200 # 7 days, duration in minutes the node synchronize its clock -) - - -# Scan motion Sensitivity levels -SCAN_SENSITIVITY_HIGH: Final = "high" -SCAN_SENSITIVITY_MEDIUM: Final = "medium" -SCAN_SENSITIVITY_OFF: Final = "medium" - -# Defaults for Scan Devices -SCAN_MOTION_RESET_TIMER: Final = 5 # Time in minutes the motion sensor should not sense motion to report "no motion" state -SCAN_SENSITIVITY = SCAN_SENSITIVITY_MEDIUM # Default sensitivity of the motion sensors -SCAN_DAYLIGHT_MODE = False # Light override - -# Sense calculations -SENSE_HUMIDITY_MULTIPLIER: Final = 125 -SENSE_HUMIDITY_OFFSET: Final = 6 -SENSE_TEMPERATURE_MULTIPLIER: Final = 175.72 -SENSE_TEMPERATURE_OFFSET: Final = 46.85 - -# Callback types -CB_NEW_NODE: Final = "NEW_NODE" -CB_JOIN_REQUEST: Final = "JOIN_REQUEST" - -# Stick device features -FEATURE_AVAILABLE: Final[dict[str, str]] = { - "id": "available", - "name": "Available", - "state": "available", - "unit": "state", -} -FEATURE_ENERGY_CONSUMPTION_TODAY: Final[dict[str, str]] = { - "id": "energy_consumption_today", - "name": "Energy consumption today", - "state": "Energy_consumption_today", - "unit": ENERGY_KILO_WATT_HOUR, -} -FEATURE_HUMIDITY: Final[dict[str, str]] = { - "id": "humidity", - "name": "Humidity", - "state": "humidity", - "unit": "%", -} -FEATURE_MOTION: Final[dict[str, str]] = { - "id": "motion", - "name": "Motion", - "state": "motion", - "unit": "state", -} -FEATURE_PING: Final[dict[str, str]] = { - "id": "ping", - "name": "Ping roundtrip", - "state": "ping", - "unit": TIME_MILLISECONDS, -} -FEATURE_POWER_USE: Final[dict[str, str]] = { - "id": "power_1s", - "name": "Power usage", - "state": "current_power_usage", - "unit": POWER_WATT, -} -FEATURE_POWER_USE_LAST_8_SEC: Final[dict[str, str]] = { - "id": "power_8s", - "name": "Power usage 8 seconds", - "state": "current_power_usage_8_sec", - "unit": POWER_WATT, -} -FEATURE_POWER_CONSUMPTION_CURRENT_HOUR: Final[dict[str, str]] = { - "id": "power_con_cur_hour", - "name": "Power consumption current hour", - "state": "power_consumption_current_hour", - "unit": ENERGY_KILO_WATT_HOUR, -} -FEATURE_POWER_CONSUMPTION_PREVIOUS_HOUR: Final[dict[str, str]] = { - "id": "power_con_prev_hour", - "name": "Power consumption previous hour", - "state": "power_consumption_previous_hour", - "unit": ENERGY_KILO_WATT_HOUR, -} -FEATURE_POWER_CONSUMPTION_TODAY: Final[dict[str, str]] = { - "id": "power_con_today", - "name": "Power consumption today", - "state": "power_consumption_today", - "unit": ENERGY_KILO_WATT_HOUR, -} -FEATURE_POWER_CONSUMPTION_YESTERDAY: Final[dict[str, str]] = { - "id": "power_con_yesterday", - "name": "Power consumption yesterday", - "state": "power_consumption_yesterday", - "unit": ENERGY_KILO_WATT_HOUR, -} -FEATURE_POWER_PRODUCTION_CURRENT_HOUR: Final[dict[str, str]] = { - "id": "power_prod_cur_hour", - "name": "Power 
production current hour", - "state": "power_production_current_hour", - "unit": ENERGY_KILO_WATT_HOUR, -} -FEATURE_POWER_PRODUCTION_PREVIOUS_HOUR: Final[dict[str, str]] = { - "id": "power_prod_prev_hour", - "name": "Power production previous hour", - "state": "power_production_previous_hour", - "unit": ENERGY_KILO_WATT_HOUR, -} -FEATURE_RELAY: Final[dict[str, str]] = { - "id": "relay", - "name": "Relay state", - "state": "relay_state", - "unit": "state", -} -FEATURE_SWITCH: Final[dict[str, str]] = { - "id": "switch", - "name": "Switch state", - "state": "switch_state", - "unit": "state", -} -FEATURE_TEMPERATURE: Final[dict[str, str]] = { - "id": "temperature", - "name": "Temperature", - "state": "temperature", - "unit": TEMP_CELSIUS, -} - -# TODO: Need to validate RSSI sensors -FEATURE_RSSI_IN: Final[dict[str, str]] = { - "id": "RSSI_in", - "name": "RSSI in", - "state": "rssi_in", - "unit": "Unknown", -} -FEATURE_RSSI_OUT: Final[dict[str, str]] = { - "id": "RSSI_out", - "name": "RSSI out", - "state": "rssi_out", - "unit": "Unknown", -} diff --git a/plugwise_usb/controller.py b/plugwise_usb/controller.py deleted file mode 100644 index 6fee9e5bc..000000000 --- a/plugwise_usb/controller.py +++ /dev/null @@ -1,442 +0,0 @@ -"""Message controller for USB-Stick - -The controller will: -- handle the connection (connect/disconnect) to the USB-Stick -- take care for message acknowledgements based on sequence id's -- resend message requests when timeouts occurs -- holds a sending queue and submit messages based on the message priority (high, medium, low) -- passes received messages back to message processor (stick.py) -- execution of callbacks after processing the response message - -""" - -from datetime import datetime, timedelta -import logging -from queue import Empty, SimpleQueue -import threading -import time - -from .connections.serial import PlugwiseUSBConnection -from .connections.socket import SocketConnection -from .constants import ( - MESSAGE_RETRY, - MESSAGE_TIME_OUT, - PRIORITY_MEDIUM, - REQUEST_FAILED, - REQUEST_SUCCESS, - SLEEP_TIME, - STATUS_RESPONSES, - UTF8_DECODE, -) -from .messages.requests import NodeInfoRequest, NodePingRequest, NodeRequest -from .messages.responses import ( - NodeAckLargeResponse, - NodeAckResponse, - NodeAckSmallResponse, -) -from .parser import PlugwiseParser -from .util import inc_seq_id - -_LOGGER = logging.getLogger(__name__) - - -class StickMessageController: - """Handle connection and message sending and receiving""" - - def __init__(self, port: str, message_processor, node_state): - """Initialize message controller""" - self.connection = None - self.discovery_finished = False - self.expected_responses = {} - self.lock_expected_responses = threading.Lock() - self.init_callback = None - self.last_seq_id = None - self.message_processor = message_processor - self.node_state = node_state - self.parser = PlugwiseParser(self.message_handler) - self.port = port - - self._send_message_queue = None - self._send_message_thread = None - self._receive_timeout_thread = False - self._receive_timeout_thread_state = False - self._send_message_thread_state = False - - @property - def receive_timeout_thread_state(self) -> bool: - """Required state of the receive timeout thread""" - return self._receive_timeout_thread_state - - @property - def receive_timeout_thread_is_alive(self) -> bool: - """Current state of the receive timeout thread""" - return self._send_message_thread.is_alive() - - @property - def send_message_thread_state(self) -> bool: - """Required state of the 
send message thread""" - return self._send_message_thread_state - - @property - def send_message_thread_is_alive(self) -> bool: - """Current state of the send message thread""" - return self._send_message_thread.is_alive() - - def connect_to_stick(self, callback=None) -> bool: - """Connect to USB-Stick and startup all worker threads - - Return: True when connection is successful. - """ - self.init_callback = callback - # Open connection to USB Stick - if ":" in self.port: - _LOGGER.debug( - "Open socket connection to %s hosting Plugwise USB stick", self.port - ) - self.connection = SocketConnection(self.port, self.parser.feed) - else: - _LOGGER.debug("Open USB serial connection to Plugwise USB stick") - self.connection = PlugwiseUSBConnection(self.port, self.parser.feed) - if self.connection.connect(): - _LOGGER.debug("Starting message controller threads...") - # send daemon - self._send_message_queue = SimpleQueue() - self._send_message_thread_state = True - self._send_message_thread = threading.Thread( - None, self._send_message_loop, "send_messages_thread", (), {} - ) - self._send_message_thread.daemon = True - self._send_message_thread.start() - # receive timeout daemon - self._receive_timeout_thread_state = True - self._receive_timeout_thread = threading.Thread( - None, self._receive_timeout_loop, "receive_timeout_thread", (), {} - ) - self._receive_timeout_thread.daemon = True - self._receive_timeout_thread.start() - _LOGGER.debug("All message controller threads started") - else: - _LOGGER.warning("Failed to connect to USB stick") - return self.connection.is_connected() - - def send( - self, - request: NodeRequest, - callback=None, - retry_counter=0, - priority=PRIORITY_MEDIUM, - ): - """Queue request message to be sent into Plugwise Zigbee network.""" - _LOGGER.debug( - "Queue %s to be send to %s with retry counter %s and priority %s", - request.__class__.__name__, - request.mac, - str(retry_counter), - str(priority), - ) - self._send_message_queue.put( - ( - priority, - retry_counter, - datetime.now(), - [ - request, - callback, - retry_counter, - None, - ], - ) - ) - - def resend(self, seq_id): - """Resend message.""" - _mac = "" - with self.lock_expected_responses: - if not self.expected_responses.get(seq_id): - _LOGGER.warning( - "Cannot resend unknown request %s", - str(seq_id), - ) - else: - if self.expected_responses[seq_id][0].mac: - _mac = self.expected_responses[seq_id][0].mac.decode(UTF8_DECODE) - _request = self.expected_responses[seq_id][0].__class__.__name__ - - if self.expected_responses[seq_id][2] == -1: - _LOGGER.debug("Drop single %s to %s ", _request, _mac) - elif self.expected_responses[seq_id][2] <= MESSAGE_RETRY: - if ( - isinstance(self.expected_responses[seq_id][0], NodeInfoRequest) - and not self.discovery_finished - ): - # Time out for node which is not discovered yet - # to speedup the initial discover phase skip retries and mark node as not discovered. 
- _LOGGER.debug( - "Skip retry %s to %s to speedup discover process", - _request, - _mac, - ) - if self.expected_responses[seq_id][1]: - self.expected_responses[seq_id][1]() - else: - _LOGGER.info( - "Resend %s for %s, retry %s of %s", - _request, - _mac, - str(self.expected_responses[seq_id][2] + 1), - str(MESSAGE_RETRY + 1), - ) - self.send( - self.expected_responses[seq_id][0], - self.expected_responses[seq_id][1], - self.expected_responses[seq_id][2] + 1, - ) - else: - _LOGGER.warning( - "Drop %s to %s because max retries %s reached", - _request, - _mac, - str(MESSAGE_RETRY + 1), - ) - # Report node as unavailable for missing NodePingRequest - if isinstance(self.expected_responses[seq_id][0], NodePingRequest): - self.node_state(_mac, False) - else: - _LOGGER.debug( - "Do a single ping request to %s to validate if node is reachable", - _mac, - ) - self.send( - NodePingRequest(self.expected_responses[seq_id][0].mac), - None, - MESSAGE_RETRY + 1, - ) - del self.expected_responses[seq_id] - - def _send_message_loop(self): - """Daemon to send messages waiting in queue.""" - while self._send_message_thread_state: - try: - _prio, _retry, _dt, request_set = self._send_message_queue.get( - block=True, timeout=1 - ) - except Empty: - time.sleep(SLEEP_TIME) - else: - # Calc next seq_id based last received ack message - # if previous seq_id is unknown use fake b"0000" - seq_id = inc_seq_id(self.last_seq_id) - with self.lock_expected_responses: - self.expected_responses[seq_id] = request_set - if self.expected_responses[seq_id][2] == 0: - _LOGGER.info( - "Send %s to %s using seq_id %s", - self.expected_responses[seq_id][0].__class__.__name__, - self.expected_responses[seq_id][0].mac, - str(seq_id), - ) - else: - _LOGGER.info( - "Resend %s to %s using seq_id %s, retry %s", - self.expected_responses[seq_id][0].__class__.__name__, - self.expected_responses[seq_id][0].mac, - str(seq_id), - str(self.expected_responses[seq_id][2]), - ) - self.expected_responses[seq_id][3] = datetime.now() - # Send request - self.connection.send(self.expected_responses[seq_id][0]) - time.sleep(SLEEP_TIME) - timeout_counter = 0 - # Wait max 1 second for acknowledge response from USB-stick - while ( - self.last_seq_id != seq_id - and timeout_counter < 10 - and seq_id != b"0000" - and self.last_seq_id is not None - ): - time.sleep(0.1) - timeout_counter += 1 - if timeout_counter >= 10 and self._send_message_thread_state: - self.resend(seq_id) - _LOGGER.debug("Send message loop stopped") - - def message_handler(self, message): - """Handle received message from Plugwise Zigbee network.""" - - # only save last seq_id and skip special ID's FFFD, FFFE, FFFF - if self.last_seq_id: - if int(self.last_seq_id, 16) < int(message.seq_id, 16) < 65533: - self.last_seq_id = message.seq_id - elif message.seq_id == b"0000" and self.last_seq_id == b"FFFB": - self.last_seq_id = b"0000" - - if isinstance(message, NodeAckSmallResponse): - self._log_status_message(message, message.ack_id) - self._post_message_action( - message.seq_id, message.ack_id, message.__class__.__name__ - ) - else: - if isinstance(message, (NodeAckResponse, NodeAckLargeResponse)): - self._log_status_message(message, message.ack_id) - else: - self._log_status_message(message) - self.message_processor(message) - if message.seq_id not in [b"FFFF", b"FFFE", b"FFFD"]: - self._post_message_action( - message.seq_id, None, message.__class__.__name__ - ) - - def _post_message_action(self, seq_id, ack_response=None, request="unknown"): - """Execute action if request has been 
successful.""" - resend_request = False - with self.lock_expected_responses: - if seq_id in self.expected_responses: - if ack_response in (*REQUEST_SUCCESS, None): - if self.expected_responses[seq_id][1]: - _LOGGER.debug( - "Execute action %s of request with seq_id %s", - self.expected_responses[seq_id][1].__name__, - str(seq_id), - ) - try: - self.expected_responses[seq_id][1]() - # TODO: narrow exception - except Exception as err: # pylint: disable=broad-except - _LOGGER.error( - "Execution of %s for request with seq_id %s failed: %s", - self.expected_responses[seq_id][1].__name__, - str(seq_id), - err, - ) - del self.expected_responses[seq_id] - elif ack_response in REQUEST_FAILED: - resend_request = True - else: - if not self.last_seq_id: - if b"0000" in self.expected_responses: - self.expected_responses[seq_id] = self.expected_responses[ - b"0000" - ] - del self.expected_responses[b"0000"] - self.last_seq_id = seq_id - else: - _LOGGER.info( - "Drop unexpected %s%s using seq_id %s", - STATUS_RESPONSES.get(ack_response, "") + " ", - request, - str(seq_id), - ) - # Still save it to try and get it back into sync - self.last_seq_id = seq_id - if resend_request: - self.resend(seq_id) - - def _receive_timeout_loop(self): - """Daemon to time out open requests without any (n)ack response message.""" - while self._receive_timeout_thread_state: - resend_list = [] - with self.lock_expected_responses: - for seq_id in list(self.expected_responses.keys()): - if self.expected_responses[seq_id][3] is not None: - if self.expected_responses[seq_id][3] < ( - datetime.now() - timedelta(seconds=MESSAGE_TIME_OUT) - ): - _mac = "" - if self.expected_responses[seq_id][0].mac: - _mac = self.expected_responses[seq_id][0].mac - _LOGGER.info( - "No response within %s seconds timeout for %s to %s with sequence ID %s", - str(MESSAGE_TIME_OUT), - self.expected_responses[seq_id][0].__class__.__name__, - _mac, - str(seq_id), - ) - resend_list.append(seq_id) - for seq_id in resend_list: - self.resend(seq_id) - receive_timeout_checker = 0 - while ( - receive_timeout_checker < MESSAGE_TIME_OUT - and self._receive_timeout_thread_state - ): - time.sleep(1) - receive_timeout_checker += 1 - _LOGGER.debug("Receive timeout loop stopped") - - def _log_status_message(self, message, status=None): - """Log status messages..""" - if status: - if status in STATUS_RESPONSES: - _LOGGER.debug( - "Received %s %s for request with seq_id %s", - STATUS_RESPONSES[status], - message.__class__.__name__, - str(message.seq_id), - ) - else: - with self.lock_expected_responses: - if self.expected_responses.get(message.seq_id): - _LOGGER.warning( - "Received unmanaged (%s) %s in response to %s with seq_id %s", - str(status), - message.__class__.__name__, - str( - self.expected_responses[message.seq_id][ - 1 - ].__class__.__name__ - ), - str(message.seq_id), - ) - else: - _LOGGER.warning( - "Received unmanaged (%s) %s for unknown request with seq_id %s", - str(status), - message.__class__.__name__, - str(message.seq_id), - ) - else: - _LOGGER.info( - "Received %s from %s with sequence id %s", - message.__class__.__name__, - message.mac.decode(UTF8_DECODE), - str(message.seq_id), - ) - - def disconnect_from_stick(self): - """Disconnect from stick and raise error if it fails""" - self._send_message_thread_state = False - self._receive_timeout_thread_state = False - self.connection.disconnect() - - def restart_receive_timeout_thread(self): - """Restart the receive timeout thread if not running""" - if not self._receive_timeout_thread.is_alive(): - 
_LOGGER.warning( - "Unexpected halt of receive thread, restart thread", - ) - self._receive_timeout_thread = threading.Thread( - None, - self._receive_timeout_loop, - "receive_timeout_thread", - (), - {}, - ) - self._receive_timeout_thread.daemon = True - self._receive_timeout_thread.start() - - def restart_send_message_thread(self): - """Restart the message sender thread if not running""" - if not self._send_message_thread.is_alive(): - _LOGGER.warning( - "Unexpected halt of send thread, restart thread", - ) - self._send_message_thread = threading.Thread( - None, - self._send_message_loop, - "send_messages_thread", - (), - {}, - ) - self._send_message_thread.daemon = True - self._send_message_thread.start() diff --git a/plugwise_usb/exceptions.py b/plugwise_usb/exceptions.py index c18b1a699..dd95c564b 100644 --- a/plugwise_usb/exceptions.py +++ b/plugwise_usb/exceptions.py @@ -5,37 +5,41 @@ class PlugwiseException(Exception): """Base error class for this Plugwise library.""" -class PortError(PlugwiseException): - """Connection to USBstick failed.""" +class CacheError(PlugwiseException): + """Cache error.""" -class StickInitError(PlugwiseException): - """Initialization of USBstick failed.""" +class EnergyError(PlugwiseException): + """Energy error.""" -class NetworkDown(PlugwiseException): - """Zigbee network not online.""" +class FeatureError(PlugwiseException): + """Feature error.""" -class CirclePlusError(PlugwiseException): - """Connection to Circle+ node failed.""" +class MessageError(PlugwiseException): + """Message errors.""" -class InvalidMessageLength(PlugwiseException): - """Invalid message length.""" +class NodeError(PlugwiseException): + """Node failed to execute request.""" -class InvalidMessageHeader(PlugwiseException): - """Invalid message header.""" +class NodeTimeout(PlugwiseException): + """No response from node.""" -class InvalidMessageFooter(PlugwiseException): - """Invalid message footer.""" +class StickError(PlugwiseException): + """Error at USB stick connection.""" -class InvalidMessageChecksum(PlugwiseException): - """Invalid data checksum.""" +class StickFailed(PlugwiseException): + """USB stick failed to accept request.""" -class TimeoutException(PlugwiseException): - """Timeout expired while waiting for response from node.""" +class StickTimeout(PlugwiseException): + """Response timed out from USB-Stick.""" + + +class SubscriptionError(PlugwiseException): + """Subscription Errors.""" diff --git a/plugwise_usb/helpers/__init__.py b/plugwise_usb/helpers/__init__.py new file mode 100644 index 000000000..15f0820b9 --- /dev/null +++ b/plugwise_usb/helpers/__init__.py @@ -0,0 +1 @@ +"""Helper functions for Plugwise USB.""" diff --git a/plugwise_usb/helpers/cache.py b/plugwise_usb/helpers/cache.py new file mode 100644 index 000000000..256a59094 --- /dev/null +++ b/plugwise_usb/helpers/cache.py @@ -0,0 +1,160 @@ +"""Base class for local caching of data.""" + +from __future__ import annotations + +from asyncio import get_running_loop +import logging +from os import getenv as os_getenv, name as os_name +from os.path import expanduser as os_path_expand_user, join as os_path_join + +from aiofiles import open as aiofiles_open, ospath # type: ignore[import-untyped] +from aiofiles.os import ( # type: ignore[import-untyped] + makedirs, + remove as aiofiles_os_remove, +) + +from ..constants import CACHE_DIR, CACHE_KEY_SEPARATOR, UTF8 +from ..exceptions import CacheError + +_LOGGER = logging.getLogger(__name__) + + +class PlugwiseCache: + """Base class to cache plugwise 
information.""" + + def __init__(self, file_name: str, root_dir: str = "") -> None: + """Initialize class.""" + self._root_dir = root_dir + self._file_name = file_name + self._cache_file_exists: bool = False + self._cache_path: str | None = None + self._cache_file: str | None = None + self._initialized = False + self._loop = get_running_loop() + + @property + def initialized(self) -> bool: + """Indicate if cache file is initialized.""" + return self._initialized + + @property + def cache_root_directory(self) -> str: + """Root directory to store the plugwise cache directory.""" + return self._root_dir + + @cache_root_directory.setter + def cache_root_directory(self, cache_root_dir: str = "") -> None: + """Root directory to store the plugwise cache directory.""" + if self._root_dir != cache_root_dir: + self._initialized = False + self._root_dir = cache_root_dir + + async def initialize_cache(self, create_root_folder: bool = False) -> None: + """Set (and create) the plugwise cache directory to store cache file.""" + if self._root_dir != "": + if not create_root_folder and not await ospath.exists(self._root_dir): + raise CacheError(f"Unable to initialize caching. Cache folder '{self._root_dir}' does not exists.") + cache_dir = self._root_dir + else: + cache_dir = self._get_writable_os_dir() + await makedirs(cache_dir, exist_ok=True) + self._cache_path = cache_dir + + self._cache_file = os_path_join(self._cache_path, self._file_name) + self._cache_file_exists = await ospath.exists(self._cache_file) + self._initialized = True + _LOGGER.debug("Start using network cache file: %s", self._cache_file) + + def _get_writable_os_dir(self) -> str: + """Return the default caching directory based on the OS.""" + if self._root_dir != "": + return self._root_dir + if os_name == "nt": + if (data_dir := os_getenv("APPDATA")) is not None: + return os_path_join(data_dir, CACHE_DIR) + raise CacheError("Unable to detect writable cache folder based on 'APPDATA' environment variable.") + return os_path_join(os_path_expand_user("~"), CACHE_DIR) + + async def write_cache(self, data: dict[str, str], rewrite: bool = False) -> None: + """"Save information to cache file.""" + if not self._initialized: + raise CacheError(f"Unable to save cache. Initialize cache file '{self._file_name}' first.") + + current_data: dict[str, str] = {} + if not rewrite: + current_data = await self.read_cache() + processed_keys: list[str] = [] + data_to_write: list[str] = [] + for _cur_key, _cur_val in current_data.items(): + _write_val = _cur_val + if _cur_key in data: + _write_val = data[_cur_key] + processed_keys.append(_cur_key) + data_to_write.append(f"{_cur_key}{CACHE_KEY_SEPARATOR}{_write_val}\n") + # Write remaining new data + for _key, _value in data.items(): + if _key not in processed_keys: + data_to_write.append(f"{_key}{CACHE_KEY_SEPARATOR}{_value}\n") + + try: + async with aiofiles_open( + file=self._cache_file, + mode="w", + encoding=UTF8, + ) as file_data: + await file_data.writelines(data_to_write) + except OSError as exc: + _LOGGER.warning( + "%s while writing data to cache file %s", exc, str(self._cache_file) + ) + else: + if not self._cache_file_exists: + self._cache_file_exists = True + _LOGGER.debug( + "Saved %s lines to cache file %s", + str(len(data)), + self._cache_file + ) + + async def read_cache(self) -> dict[str, str]: + """Return current data from cache file.""" + if not self._initialized: + raise CacheError(f"Unable to save cache. 
Initialize cache file '{self._file_name}' first.") + current_data: dict[str, str] = {} + if not self._cache_file_exists: + _LOGGER.debug( + "Cache file '%s' does not exists, return empty cache data", self._cache_file + ) + return current_data + try: + async with aiofiles_open( + file=self._cache_file, + encoding=UTF8, + ) as read_file_data: + lines: list[str] = await read_file_data.readlines() + except OSError as exc: + # suppress file errors as this is expected the first time + # when no cache file exists yet. + _LOGGER.warning( + "OS error %s while reading cache file %s", exc, str(self._cache_file) + ) + return current_data + + for line in lines: + data = line.strip() + if (index_separator := data.find(CACHE_KEY_SEPARATOR)) == -1: + _LOGGER.warning( + "Skip invalid line '%s' in cache file %s", + data, + str(self._cache_file) + ) + break + current_data[data[:index_separator]] = data[index_separator + 1:] + return current_data + + async def delete_cache(self) -> None: + """Delete cache file.""" + if self._cache_file is None: + return + if await ospath.exists(self._cache_file): + await aiofiles_os_remove(self._cache_file) diff --git a/plugwise_usb/helpers/util.py b/plugwise_usb/helpers/util.py new file mode 100644 index 000000000..8e85878fc --- /dev/null +++ b/plugwise_usb/helpers/util.py @@ -0,0 +1,58 @@ +"""Plugwise utility helpers.""" +from __future__ import annotations + +import re + +import crcmod + +from ..constants import HW_MODELS + +crc_fun = crcmod.mkCrcFun(0x11021, rev=False, initCrc=0x0000, xorOut=0x0000) + + +def validate_mac(mac: str) -> bool: + """Validate the supplied string is in a MAC address format.""" + if not re.match("^[A-F0-9]+$", mac): + return False + try: + _ = int(mac, 16) + except ValueError: + return False + return True + + +def version_to_model(version: str | None) -> tuple[str|None, str]: + """Translate hardware_version to device type.""" + if version is None: + return (None, "Unknown") + local_version = version + model = HW_MODELS.get(version) + if model is None: + local_version = version[4:10] + model = HW_MODELS.get(local_version) + if model is None: + # Try again with reversed order + local_version = version[-2:] + version[-4:-2] + version[-6:-4] + model = HW_MODELS.get(local_version) + + return (local_version, model) if model is not None else (None, "Unknown") + + +# octals (and hex) type as int according to +# https://docs.python.org/3/library/stdtypes.html +def uint_to_int(val: int, octals: int) -> int: + """Compute the 2's compliment of int value val for negative values.""" + bits = octals << 2 + if (val & (1 << (bits - 1))) != 0: + val = val - (1 << bits) + return val + + +# octals (and hex) type as int according to +# https://docs.python.org/3/library/stdtypes.html +def int_to_uint(val: int, octals: int) -> int: + """Compute the 2's compliment of int value val for negative values.""" + bits = octals << 2 + if val < 0: + val = val + (1 << bits) + return val diff --git a/plugwise_usb/messages/__init__.py b/plugwise_usb/messages/__init__.py index 4f8d28efb..9e4934e8a 100644 --- a/plugwise_usb/messages/__init__.py +++ b/plugwise_usb/messages/__init__.py @@ -1,30 +1,109 @@ """Plugwise messages.""" -from ..constants import MESSAGE_FOOTER, MESSAGE_HEADER, UTF8_DECODE -from ..util import crc_fun +from __future__ import annotations +from datetime import UTC, datetime +from enum import Enum +from typing import Any + +from ..constants import MESSAGE_FOOTER, MESSAGE_HEADER, UTF8 +from ..exceptions import MessageError +from ..helpers.util import crc_fun + + 
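Note: crc_fun above is built with crcmod using polynomial 0x11021, no bit reflection, a zero initial value and zero final XOR, i.e. the checksum commonly known as CRC-16/XMODEM. A stdlib-only sketch of that checksum and of the frame layout produced by PlugwiseMessage.serialize() below (the example payload is illustrative only, not part of the patch):

def crc16_xmodem(data: bytes) -> int:
    # Bitwise CRC-16: poly 0x1021, init 0x0000, no reflection, no final XOR.
    crc = 0x0000
    for byte in data:
        crc ^= byte << 8
        for _ in range(8):
            crc = ((crc << 1) ^ 0x1021) & 0xFFFF if crc & 0x8000 else (crc << 1) & 0xFFFF
    return crc

# Frame layout mirrored from serialize(): header + identifier [+ mac + args] + CRC + footer.
MESSAGE_HEADER = b"\x05\x05\x03\x03"
MESSAGE_FOOTER = b"\x0d\x0a"
payload = b"000A"  # example: the StickInitRequest identifier, which carries no MAC and no arguments
frame = MESSAGE_HEADER + payload + f"{crc16_xmodem(payload):04X}".encode() + MESSAGE_FOOTER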
+class Priority(int, Enum): + """Message priority levels for USB-stick message requests.""" + + CANCEL = 0 + HIGH = 1 + MEDIUM = 2 + LOW = 3 class PlugwiseMessage: - """Plugwise message base.""" + """Plugwise message base class.""" + + _identifier = b"FFFF" + + def __init__(self) -> None: + """Initialize a plugwise message.""" + self._mac: bytes | None = None + self._checksum: bytes | None = None + self._args: list[Any] = [] + self._seq_id: bytes | None = None + self.priority: Priority = Priority.MEDIUM + self.timestamp = datetime.now(tz=UTC) + + @property + def seq_id(self) -> bytes | None: + """Return sequence id.""" + return self._seq_id - ID = b"0000" + @seq_id.setter + def seq_id(self, seq_id: bytes) -> None: + """Assign sequence id.""" + self._seq_id = seq_id - def __init__(self): - self.mac = "" - self.checksum = None - self.args = [] + @property + def identifier(self) -> bytes: + """Return the message ID.""" + return self._identifier - def serialize(self): - """Return message in a serialized format that can be sent out on wire.""" - _args = b"".join(a.serialize() for a in self.args) - msg = self.ID - if self.mac != "": - msg += self.mac - msg += _args - self.checksum = self.calculate_checksum(msg) - return MESSAGE_HEADER + msg + self.checksum + MESSAGE_FOOTER + @property + def mac(self) -> bytes: + """Return mac in bytes.""" + if self._mac is None: + raise MessageError("Mac not set") + return self._mac + + @property + def mac_decoded(self) -> str: + """Return mac in decoded string format.""" + if self._mac is None: + return "not defined" + return self._mac.decode(UTF8) + + def serialize(self) -> bytes: + """Return message in a serialized format that can be sent out.""" + data = self._identifier + if self._mac is not None: + data += self._mac + data += b"".join(a.serialize() for a in self._args) + self._checksum = self.calculate_checksum(data) + return MESSAGE_HEADER + data + self._checksum + MESSAGE_FOOTER @staticmethod - def calculate_checksum(something): + def calculate_checksum(data: bytes) -> bytes: """Calculate crc checksum.""" - return bytes("%04X" % crc_fun(something), UTF8_DECODE) + return bytes(f"{crc_fun(data):04X}", UTF8) + + def __gt__(self, other: PlugwiseMessage) -> bool: + """Greater than.""" + if self.priority.value == other.priority.value: + if self.seq_id is not None and other.seq_id is not None: + return self.seq_id < other.seq_id + return self.timestamp > other.timestamp + return self.priority.value < other.priority.value + + def __lt__(self, other: PlugwiseMessage) -> bool: + """Less than.""" + if self.priority.value == other.priority.value: + if self.seq_id is not None and other.seq_id is not None: + return self.seq_id > other.seq_id + return self.timestamp < other.timestamp + return self.priority.value > other.priority.value + + def __ge__(self, other: PlugwiseMessage) -> bool: + """Greater than or equal.""" + if self.priority.value == other.priority.value: + if self.seq_id is not None and other.seq_id is not None: + return self.seq_id < other.seq_id + return self.timestamp >= other.timestamp + return self.priority.value < other.priority.value + + def __le__(self, other: PlugwiseMessage) -> bool: + """Less than or equal.""" + if self.priority.value == other.priority.value: + if self.seq_id is not None and other.seq_id is not None: + return self.seq_id <= other.seq_id + return self.timestamp <= other.timestamp + return self.priority.value > other.priority.value diff --git a/plugwise_usb/messages/properties.py b/plugwise_usb/messages/properties.py new file mode 
100644 index 000000000..a2a065aa4 --- /dev/null +++ b/plugwise_usb/messages/properties.py @@ -0,0 +1,403 @@ +"""Message property types.""" + +import binascii +from datetime import UTC, date, datetime, time, timedelta +import struct +from typing import Any + +from ..constants import LOGADDR_OFFSET, PLUGWISE_EPOCH, UTF8 +from ..exceptions import MessageError +from ..helpers.util import int_to_uint + + +class BaseType: + """Generic single instance property.""" + + def __init__(self, raw_value: Any, length: int) -> None: + """Initialize single instance property.""" + self._raw_value = raw_value + self.length = length + + def serialize(self) -> bytes: + """Return current value into an iterable list of bytes.""" + return bytes(self._raw_value, UTF8) + + def deserialize(self, val: bytes) -> None: + """Convert current value into single data object.""" + raise NotImplementedError() + + def __len__(self) -> int: + """Return length of property object.""" + return self.length + + +class CompositeType: + """Generic multi instance property.""" + + def __init__(self) -> None: + """Initialize multi instance property.""" + self.contents: list[ + String | Int | SInt | UnixTimestamp | Year2k | IntDec | Float | LogAddr + ] = [] + + def serialize(self) -> bytes: + """Return current value of all properties into an iterable list of bytes.""" + return b"".join(a.serialize() for a in self.contents) + + def deserialize(self, val: bytes) -> None: + """Convert data into multiple data objects.""" + for content in self.contents: + _val = val[: len(content)] + content.deserialize(_val) + val = val[len(_val) :] + + def __len__(self) -> int: + """Return length of property objects.""" + return sum(len(x) for x in self.contents) + + +class Bytes(BaseType): + """Bytes based property.""" + + def __init__(self, value: bytes | None, length: int) -> None: + """Initialize bytes based property.""" + super().__init__(value, length) + self._value: bytes | None = None + + def deserialize(self, val: bytes) -> None: + """Set current value.""" + self._value = val + + @property + def value(self) -> bytes: + """Return bytes value.""" + if self._value is None: + raise MessageError("Unable to return value. Deserialize data first") + return self._value + + +class String(BaseType): + """String based property.""" + + def __init__(self, value: str | None, length: int) -> None: + """Initialize string based property.""" + super().__init__(value, length) + self._value: str | None = None + + def deserialize(self, val: bytes) -> None: + """Convert current value into single string formatted object.""" + self._value = val.decode(UTF8) + + @property + def value(self) -> str: + """Return converted int value.""" + if self._value is None: + raise MessageError("Unable to return value. 
Deserialize data first") + return self._value + + +class Int(BaseType): + """Integer based property.""" + + def __init__(self, value: int, length: int = 2, negative: bool = True) -> None: + """Initialize integer based property.""" + super().__init__(value, length) + self.negative = negative + self._value: int | None = None + + def serialize(self) -> bytes: + """Return current string formatted value into an iterable list of bytes.""" + fmt = "%%0%dX" % self.length + return bytes(fmt % self._raw_value, UTF8) + + def deserialize(self, val: bytes) -> None: + """Convert current value into single string formatted object.""" + self._value = int(val, 16) + if self.negative: + mask = 1 << (self.length * 4 - 1) + self._value = -(self._value & mask) + (self._value & ~mask) + + @property + def value(self) -> int: + """Return converted int value.""" + if self._value is None: + raise MessageError("Unable to return value. Deserialize data first") + return self._value + + +class SInt(BaseType): + """String formatted data with integer value property.""" + + def __init__(self, value: int, length: int = 2) -> None: + """Initialize string formatted data with integer value property.""" + super().__init__(value, length) + self._value: int | None = None + + @staticmethod + def negative(val: int, octals: int) -> int: + """Compute the 2's compliment of int value val for negative values.""" + bits = octals << 2 + if (val & (1 << (bits - 1))) != 0: + val = val - (1 << bits) + return val + + def serialize(self) -> bytes: + """Return current string formatted integer value into an iterable list of bytes.""" + fmt = "%%0%dX" % self.length + return bytes(fmt % int_to_uint(self._raw_value, self.length), UTF8) + + def deserialize(self, val: bytes) -> None: + """Convert current string formatted value into integer value.""" + # TODO: negative is not initialized! 20220405 + self._value = self.negative(int(val, 16), self.length) + + @property + def value(self) -> int: + """Return converted datetime value.""" + if self._value is None: + raise MessageError("Unable to return value. Deserialize data first") + return self._value + + +class UnixTimestamp(BaseType): + """Unix formatted timestamp property.""" + + def __init__(self, value: datetime | None, length: int = 8) -> None: + """Initialize Unix formatted timestamp property.""" + super().__init__(value, length) + self._value: datetime | None = None + + def serialize(self) -> bytes: + """Return current string formatted value into an iterable list of bytes.""" + if not isinstance(self._raw_value, datetime): + raise MessageError("Unable to serialize. Value is not a datetime object") + fmt = "%%0%dX" % self.length + date_in_float = self._raw_value.timestamp() + return bytes(fmt % int(date_in_float), UTF8) + + def deserialize(self, val: bytes) -> None: + """Convert data into datetime based on Unix timestamp format.""" + self._value = datetime.fromtimestamp(int(val, 16), UTC) + + @property + def value(self) -> datetime: + """Return converted datetime value.""" + if self._value is None: + raise MessageError("Unable to return value. Deserialize data first") + return self._value + + +class Year2k(Int): + """Year formatted property. + + Based on offset from the year 2000. 
+ """ + + def deserialize(self, val: bytes) -> None: + """Convert data into year valued based value with offset to Y2k.""" + super().deserialize(val) + if self._value is not None: + self._value += PLUGWISE_EPOCH + + @property + def value(self) -> int: + """Return converted int value.""" + if self._value is None: + raise MessageError("Unable to return value. Deserialize data first") + return self._value + + +class DateTime(CompositeType): + """Date time formatted property. + + format is: YYMMmmmm + where year is offset value from the epoch which is Y2K + and last four bytes are offset from the beginning of the month in minutes. + """ + + def __init__(self, year: int = 0, month: int = 1, minutes: int = 0) -> None: + """Initialize Date time formatted property.""" + CompositeType.__init__(self) + self.year = Year2k(year - PLUGWISE_EPOCH, 2) + self.month = Int(month, 2, False) + self.minutes = Int(minutes, 4, False) + self.contents += [self.year, self.month, self.minutes] + self._value: datetime | None = None + self._deserialized = False + + def deserialize(self, val: bytes) -> None: + """Convert data into datetime based on timestamp with offset to Y2k.""" + if val in (b"FFFFFFFF", b"00000000"): + self._value = None + else: + CompositeType.deserialize(self, val) + self._value = datetime( + year=self.year.value, month=self.month.value, day=1 + ) + timedelta(minutes=self.minutes.value) + self._deserialized = True + + @property + def value_set(self) -> bool: + """True when datetime is converted.""" + if not self._deserialized: + raise MessageError("Unable to return value. Deserialize data first") + return self._value is not None + + @property + def value(self) -> datetime: + """Return converted datetime value.""" + if self._value is None: + raise MessageError("Unable to return value. Deserialize data first") + return self._value + + +class Time(CompositeType): + """Time formatted property.""" + + def __init__(self, hour: int = 0, minute: int = 0, second: int = 0) -> None: + """Initialize time formatted property.""" + CompositeType.__init__(self) + self.hour = Int(hour, 2, False) + self.minute = Int(minute, 2, False) + self.second = Int(second, 2, False) + self.contents += [self.hour, self.minute, self.second] + self._value: time | None = None + + def deserialize(self, val: bytes) -> None: + """Convert data into time value.""" + CompositeType.deserialize(self, val) + self._value = time(self.hour.value, self.minute.value, self.second.value) + + @property + def value(self) -> time: + """Return converted time value.""" + if self._value is None: + raise MessageError("Unable to return value. Deserialize data first") + return self._value + + +class IntDec(BaseType): + """Integer as string formatted data with integer value property.""" + + def __init__(self, value: int, length: int = 2) -> None: + """Initialize integer based property.""" + super().__init__(value, length) + self._value: str | None = None + + def serialize(self) -> bytes: + """Return current string formatted integer value into an iterable list of bytes.""" + fmt = "%%0%dd" % self.length + return bytes(fmt % self._raw_value, UTF8) + + def deserialize(self, val: bytes) -> None: + """Convert data into integer value based on string formatted data format.""" + self._value = val.decode(UTF8) + + @property + def value(self) -> str: + """Return converted string value.""" + if self._value is None: + raise MessageError("Unable to return value. 
Deserialize data first") + return self._value + + +class RealClockTime(CompositeType): + """Time value property based on integer values.""" + + def __init__(self, hour: int = 0, minute: int = 0, second: int = 0) -> None: + """Initialize time formatted property.""" + super().__init__() + self.hour = IntDec(hour, 2) + self.minute = IntDec(minute, 2) + self.second = IntDec(second, 2) + self.contents += [self.second, self.minute, self.hour] + self._value: time | None = None + + def deserialize(self, val: bytes) -> None: + """Convert data into time value based on integer formatted data.""" + CompositeType.deserialize(self, val) + self._value = time( + int(self.hour.value), + int(self.minute.value), + int(self.second.value), + ) + + @property + def value(self) -> time: + """Return converted time value.""" + if self._value is None: + raise MessageError("Unable to return value. Deserialize data first") + return self._value + + +class RealClockDate(CompositeType): + """Date value property based on integer values.""" + + def __init__(self, day: int = 0, month: int = 0, year: int = 0) -> None: + """Initialize date formatted property.""" + super().__init__() + self.day = IntDec(day, 2) + self.month = IntDec(month, 2) + self.year = IntDec(year - PLUGWISE_EPOCH, 2) + self.contents += [self.day, self.month, self.year] + self._value: date | None = None + + def deserialize(self, val: bytes) -> None: + """Convert data into date value based on integer formatted data.""" + CompositeType.deserialize(self, val) + self._value = date( + int(self.year.value) + PLUGWISE_EPOCH, + int(self.month.value), + int(self.day.value), + ) + + @property + def value(self) -> date: + """Return converted date value.""" + if self._value is None: + raise MessageError("Unable to return value. Deserialize data first") + return self._value + + +class Float(BaseType): + """Float value property.""" + + def __init__(self, value: float, length: int = 4) -> None: + """Initialize float value property.""" + super().__init__(value, length) + self._value: float | None = None + + def deserialize(self, val: bytes) -> None: + """Convert data into float value.""" + hex_val = binascii.unhexlify(val) + self._value = float(struct.unpack("!f", hex_val)[0]) + + @property + def value(self) -> float: + """Return converted float value.""" + if self._value is None: + raise MessageError("Unable to return value. Deserialize data first") + return self._value + + +class LogAddr(Int): + """Log address value property.""" + + def serialize(self) -> bytes: + """Return current log address formatted value into an iterable list of bytes.""" + return bytes("%08X" % ((self._raw_value * 32) + LOGADDR_OFFSET), UTF8) + + def deserialize(self, val: bytes) -> None: + """Convert data into integer value based on log address formatted data.""" + if val == b"00000000": + self._value = int(0) + return + Int.deserialize(self, val) + self._value = (self.value - LOGADDR_OFFSET) // 32 + + @property + def value(self) -> int: + """Return converted time value.""" + if self._value is None: + raise MessageError("Unable to return value. 
Deserialize data first") + return self._value diff --git a/plugwise_usb/messages/py.typed b/plugwise_usb/messages/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/plugwise_usb/messages/requests.py b/plugwise_usb/messages/requests.py index 9e43968ab..4cf6dd3ed 100644 --- a/plugwise_usb/messages/requests.py +++ b/plugwise_usb/messages/requests.py @@ -1,7 +1,50 @@ """All known request messages to be send to plugwise devices.""" -from ..constants import MESSAGE_FOOTER, MESSAGE_HEADER -from ..messages import PlugwiseMessage -from ..util import ( + +from __future__ import annotations + +from asyncio import Future, TimerHandle, get_running_loop +from collections.abc import Awaitable, Callable, Coroutine +from copy import copy +from datetime import datetime +import logging +from typing import Any + +from ..constants import ( + DAY_IN_MINUTES, + HOUR_IN_MINUTES, + LOGADDR_OFFSET, + MAX_RETRIES, + MESSAGE_FOOTER, + MESSAGE_HEADER, + NODE_TIME_OUT, +) +from ..exceptions import MessageError, NodeError, NodeTimeout, StickError, StickTimeout +from ..messages.responses import ( + CircleClockResponse, + CircleEnergyLogsResponse, + CircleLogDataResponse, + CirclePlusConnectResponse, + CirclePlusRealTimeClockResponse, + CirclePlusScanResponse, + CirclePowerUsageResponse, + CircleRelayInitStateResponse, + EnergyCalibrationResponse, + NodeAckResponse, + NodeFeaturesResponse, + NodeImageValidationResponse, + NodeInfoResponse, + NodePingResponse, + NodeRemoveResponse, + NodeResponse, + NodeSpecificResponse, + PlugwiseResponse, + StickInitResponse, + StickNetworkInfoResponse, + StickResponse, + StickResponseType, +) +from . import PlugwiseMessage, Priority +from .properties import ( DateTime, Int, LogAddr, @@ -12,488 +55,1486 @@ Time, ) +_LOGGER = logging.getLogger(__name__) -class NodeRequest(PlugwiseMessage): - """Base class for request messages to be send from by USB-Stick.""" - - def __init__(self, mac): - PlugwiseMessage.__init__(self) - self.args = [] - self.mac = mac +class PlugwiseRequest(PlugwiseMessage): + """Base class for request messages to be send from by USB-Stick.""" -class NodeNetworkInfoRequest(NodeRequest): - """TODO: PublicNetworkInfoRequest + _reply_identifier: bytes = b"0000" - No arguments + def __init__( + self, + send_fn: Callable[[PlugwiseRequest, bool], Awaitable[PlugwiseResponse | None]] + | None, + mac: bytes | None, + ) -> None: + """Initialize request message.""" + super().__init__() + self._args = [] + self._mac = mac + self._send_counter: int = 0 + self._send_fn = send_fn + self._max_retries: int = MAX_RETRIES + self._loop = get_running_loop() + self._response: PlugwiseResponse | None = None + self._stick_subscription_fn: ( + Callable[ + [ + Callable[[StickResponse], Coroutine[Any, Any, None]], + bytes | None, + tuple[StickResponseType, ...] | None, + ], + Callable[[], None], + ] + | None + ) = None + self._node_subscription_fn: ( + Callable[ + [ + Callable[[PlugwiseResponse], Coroutine[Any, Any, bool]], + bytes | None, + tuple[bytes, ...] 
| None, + bytes | None, + ], + Callable[[], None], + ] + | None + ) = None + self._unsubscribe_stick_response: Callable[[], None] | None = None + self._unsubscribe_node_response: Callable[[], None] | None = None + self._response_timeout: TimerHandle | None = None + self._response_future: Future[PlugwiseResponse] = self._loop.create_future() + self._waiting_for_response = False + + def __repr__(self) -> str: + """Convert request into writable str.""" + if self._seq_id is None: + return f"{self.__class__.__name__} (mac={self.mac_decoded}, seq_id=UNKNOWN, attempt={self._send_counter})" + return f"{self.__class__.__name__} (mac={self.mac_decoded}, seq_id={self._seq_id!r}, attempt={self._send_counter})" + + def response_future(self) -> Future[PlugwiseResponse]: + """Return awaitable future with response message.""" + if self._response_future.done(): + self._response_future = self._loop.create_future() + return self._response_future + + @property + def response(self) -> PlugwiseResponse: + """Return response message.""" + if self._response is None: + raise StickError("No response available") + return self._response + + @property + def seq_id(self) -> bytes | None: + """Return sequence id assigned to this request.""" + return self._seq_id + + @seq_id.setter + def seq_id(self, seq_id: bytes) -> None: + """Assign sequence id.""" + if self._seq_id is not None: + _LOGGER.warning( + "Unable to change seq_id into %s for request %s", seq_id, self + ) + raise MessageError( + f"Unable to set seq_id to {seq_id!r}. Already set to {self._seq_id!r}" + ) + self._seq_id = seq_id + + async def subscribe_to_response( + self, + stick_subscription_fn: Callable[ + [ + Callable[[StickResponse], Coroutine[Any, Any, None]], + bytes | None, + tuple[StickResponseType, ...] | None, + ], + Coroutine[Any, Any, Callable[[], None]], + ], + node_subscription_fn: Callable[ + [ + Callable[[PlugwiseResponse], Coroutine[Any, Any, bool]], + bytes | None, + tuple[bytes, ...] 
| None, + bytes | None, + ], + Coroutine[Any, Any, Callable[[], None]], + ], + ) -> None: + """Subscribe to receive the response messages.""" + if self._seq_id is None: + raise MessageError( + "Unable to subscribe to response because seq_id is not set" + ) + self._unsubscribe_stick_response = await stick_subscription_fn( + self._process_stick_response, self._seq_id, None + ) + self._unsubscribe_node_response = await node_subscription_fn( + self.process_node_response, + self._mac, + (self._reply_identifier,), + self._seq_id, + ) + + def _unsubscribe_from_stick(self) -> None: + """Unsubscribe from StickResponse messages.""" + if self._unsubscribe_stick_response is not None: + self._unsubscribe_stick_response() + self._unsubscribe_stick_response = None + + def _unsubscribe_from_node(self) -> None: + """Unsubscribe from NodeResponse messages.""" + if self._unsubscribe_node_response is not None: + self._unsubscribe_node_response() + self._unsubscribe_node_response = None + + def start_response_timeout(self) -> None: + """Start timeout for node response.""" + self.stop_response_timeout() + self._response_timeout = self._loop.call_later( + NODE_TIME_OUT, self._response_timeout_expired + ) + self._waiting_for_response = True + + def stop_response_timeout(self) -> None: + """Stop timeout for node response.""" + self._waiting_for_response = True + if self._response_timeout is not None: + self._response_timeout.cancel() + + @property + def waiting_for_response(self) -> bool: + """Indicate if request is actively waiting for a response.""" + return self._waiting_for_response + + def _response_timeout_expired(self, stick_timeout: bool = False) -> None: + """Handle response timeout.""" + if self._response_future.done(): + return + if stick_timeout: + _LOGGER.info("USB-stick responded with time out to %s", self) + else: + _LOGGER.info( + "No response received for %s within %s seconds", self, NODE_TIME_OUT + ) + self._seq_id = None + self._unsubscribe_from_stick() + self._unsubscribe_from_node() + if stick_timeout: + self._response_future.set_exception( + StickTimeout(f"USB-stick responded with time out to {self}") + ) + else: + self._response_future.set_exception( + NodeTimeout( + f"No device response to {self} within {NODE_TIME_OUT} seconds" + ) + ) + + def assign_error(self, error: BaseException) -> None: + """Assign error for this request.""" + self.stop_response_timeout() + self._unsubscribe_from_stick() + self._unsubscribe_from_node() + if self._response_future.done(): + return + self._waiting_for_response = False + self._response_future.set_exception(error) + + async def process_node_response(self, response: PlugwiseResponse) -> bool: + """Process incoming message from node.""" + if self._seq_id is None: + _LOGGER.warning( + "Received %s as reply to %s without a seq_id assigned", + self._response, + self, + ) + return False + if self._seq_id != response.seq_id: + _LOGGER.warning( + "Received %s as reply to %s which is not correct (expected seq_id=%s)", + self._response, + self, + str(self.seq_id), + ) + return False + if self._response_future.done(): + return False + + self._response = copy(response) + self.stop_response_timeout() + self._unsubscribe_from_stick() + self._unsubscribe_from_node() + if self._send_counter > 1: + _LOGGER.debug( + "Received %s after %s retries as reply to %s", + self._response, + self._send_counter, + self, + ) + else: + _LOGGER.debug("Received %s as reply to %s", self._response, self) + self._response_future.set_result(self._response) + return True + + async def 
_process_stick_response(self, stick_response: StickResponse) -> None: + """Process incoming stick response.""" + if self._response_future.done(): + return + if self._seq_id is None or self._seq_id != stick_response.seq_id: + return + if stick_response.ack_id == StickResponseType.TIMEOUT: + self._response_timeout_expired(stick_timeout=True) + elif stick_response.ack_id == StickResponseType.FAILED: + self._unsubscribe_from_node() + self._seq_id = None + self._response_future.set_exception( + NodeError(f"Stick failed request {self._seq_id}") + ) + elif stick_response.ack_id == StickResponseType.ACCEPT: + pass + else: + _LOGGER.debug( + "Unknown StickResponseType %s at %s for request %s", + str(stick_response.ack_id), + stick_response, + self, + ) + + async def _send_request( + self, suppress_node_errors: bool = False + ) -> PlugwiseResponse | None: + """Send request.""" + if self._send_fn is None: + return None + return await self._send_fn(self, suppress_node_errors) + + @property + def max_retries(self) -> int: + """Return the maximum retries.""" + return self._max_retries + + @max_retries.setter + def max_retries(self, max_retries: int) -> None: + """Set maximum retries.""" + self._max_retries = max_retries + + @property + def retries_left(self) -> int: + """Return number of retries left.""" + return self._max_retries - self._send_counter + + @property + def resend(self) -> bool: + """Return true if retry counter is not reached yet.""" + return self._max_retries > self._send_counter + + def add_send_attempt(self) -> None: + """Increase the number of retries.""" + self._send_counter += 1 + + +class PlugwiseCancelRequest(PlugwiseRequest): + """Cancel request for priority queue.""" + + def __init__(self) -> None: + """Initialize request message.""" + super().__init__(None, None) + self.priority = Priority.CANCEL + + +class StickNetworkInfoRequest(PlugwiseRequest): + """Request network information. + + Supported protocols : 1.0, 2.0 + Response message : StickNetworkInfoResponse """ - ID = b"0001" + _identifier = b"0001" + _reply_identifier = b"0002" + async def send( + self, suppress_node_errors: bool = False + ) -> StickNetworkInfoResponse | None: + """Send request.""" + result = await self._send_request(suppress_node_errors) + if isinstance(result, StickNetworkInfoResponse): + return result + if result is None: + return None + raise MessageError( + f"Invalid response message. Received {result.__class__.__name__}, expected StickNetworkInfoResponse" + ) -class CirclePlusConnectRequest(NodeRequest): - """Request to connect a Circle+ to the Stick - Response message: CirclePlusConnectResponse - """ +class CirclePlusConnectRequest(PlugwiseRequest): + """Request to connect a Circle+ to the Stick. - ID = b"0004" + Supported protocols : 1.0, 2.0 + Response message : CirclePlusConnectResponse + """ - # This message has an exceptional format and therefore need to override the serialize method - def serialize(self): - # This command has args: byte: key, byte: networkinfo.index, ulong: networkkey = 0 + _identifier = b"0004" + _reply_identifier = b"0005" + + async def send( + self, suppress_node_errors: bool = False + ) -> CirclePlusConnectResponse | None: + """Send request.""" + result = await self._send_request(suppress_node_errors) + if isinstance(result, CirclePlusConnectResponse): + return result + if result is None: + return None + raise MessageError( + f"Invalid response message. 
Received {result.__class__.__name__}, expected CirclePlusConnectResponse" + ) + + # This message has an exceptional format and therefore + # need to override the serialize method + def serialize(self) -> bytes: + """Convert message to serialized list of bytes.""" + # This command has + # args: byte + # key, byte + # network info.index, ulong + # network key = 0 args = b"00000000000000000000" - msg = self.ID + args + self.mac + msg: bytes = self._identifier + args + if self._mac is not None: + msg += self._mac checksum = self.calculate_checksum(msg) return MESSAGE_HEADER + msg + checksum + MESSAGE_FOOTER -class NodeAddRequest(NodeRequest): - """Inform node it is added to the Plugwise Network it to memory of Circle+ node +class PlugwiseRequestWithNodeAckResponse(PlugwiseRequest): + """Base class of a plugwise request with a NodeAckResponse.""" + + async def send(self, suppress_node_errors: bool = False) -> NodeAckResponse | None: + """Send request.""" + result = await self._send_request(suppress_node_errors) + if isinstance(result, NodeAckResponse): + return result + if result is None: + return None + raise MessageError( + f"Invalid response message. Received {result.__class__.__name__}, expected NodeAckResponse" + ) + - Response message: [acknowledge message] +class NodeAddRequest(PlugwiseRequestWithNodeAckResponse): + """Add node to the Plugwise Network and add it to memory of Circle+ node. + + Supported protocols : 1.0, 2.0 + Response message : TODO check if response is NodeAckResponse """ - ID = b"0007" + _identifier = b"0007" + _reply_identifier = b"0005" - def __init__(self, mac, accept: bool): - super().__init__(mac) + def __init__( + self, + send_fn: Callable[[PlugwiseRequest, bool], Awaitable[PlugwiseResponse | None]], + mac: bytes, + accept: bool, + ) -> None: + """Initialize NodeAddRequest message object.""" + super().__init__(send_fn, mac) accept_value = 1 if accept else 0 - self.args.append(Int(accept_value, length=2)) + self._args.append(Int(accept_value, length=2)) # This message has an exceptional format (MAC at end of message) # and therefore a need to override the serialize method - def serialize(self): - args = b"".join(a.serialize() for a in self.args) - msg = self.ID + args + self.mac + def serialize(self) -> bytes: + """Convert message to serialized list of bytes.""" + args = b"".join(a.serialize() for a in self._args) + msg: bytes = self._identifier + args + if self._mac is not None: + msg += self._mac checksum = self.calculate_checksum(msg) return MESSAGE_HEADER + msg + checksum + MESSAGE_FOOTER -class NodeAllowJoiningRequest(NodeRequest): +class CirclePlusAllowJoiningRequest(PlugwiseRequest): """Enable or disable receiving joining request of unjoined nodes. - Circle+ node will respond with an acknowledge message - - Response message: NodeAckLargeResponse - """ - - ID = b"0008" - def __init__(self, accept: bool): - super().__init__("") - # TODO: Make sure that '01' means enable, and '00' disable joining - val = 1 if accept else 0 - self.args.append(Int(val, length=2)) + Circle+ node will respond + Supported protocols : 1.0, 2.0, + 2.6 (has extra 'AllowThirdParty' field) + Response message : NodeResponse + """ -class NodeResetRequest(NodeRequest): - """TODO: Some kind of reset request + _identifier = b"0008" + _reply_identifier = b"0000" - Response message: ??? 
+ def __init__( + self, + send_fn: Callable[[PlugwiseRequest, bool], Awaitable[PlugwiseResponse | None]], + enable: bool, + ) -> None: + """Initialize NodeAddRequest message object.""" + super().__init__(send_fn, None) + val = 1 if enable else 0 + self._args.append(Int(val, length=2)) + + async def send(self, suppress_node_errors: bool = False) -> NodeResponse | None: + """Send request.""" + result = await self._send_request(suppress_node_errors) + if isinstance(result, NodeResponse): + return result + if result is None: + return None + raise MessageError( + f"Invalid response message. Received {result.__class__.__name__}, expected NodeResponse" + ) + + +class NodeResetRequest(PlugwiseRequest): + """TODO:Some kind of reset request. + + Supported protocols : 1.0, 2.0, 2.1 + Response message : """ - ID = b"0009" + _identifier = b"0009" + _reply_identifier = b"0003" - def __init__(self, mac, moduletype, timeout): - super().__init__(mac) - self.args += [ + def __init__( + self, + send_fn: Callable[[PlugwiseRequest, bool], Awaitable[PlugwiseResponse | None]], + mac: bytes, + moduletype: int, + timeout: int, + ) -> None: + """Initialize NodeResetRequest message object.""" + super().__init__(send_fn, mac) + self._args += [ Int(moduletype, length=2), Int(timeout, length=2), ] + async def send( + self, suppress_node_errors: bool = False + ) -> NodeSpecificResponse | None: + """Send request.""" + result = await self._send_request(suppress_node_errors) + if isinstance(result, NodeSpecificResponse): + return result + if result is None: + return None + raise MessageError( + f"Invalid response message. Received {result.__class__.__name__}, expected NodeSpecificResponse" + ) + + +class StickInitRequest(PlugwiseRequest): + """Initialize USB-Stick. + + Supported protocols : 1.0, 2.0 + Response message : StickInitResponse + """ -class StickInitRequest(NodeRequest): - """Initialize USB-Stick + _identifier = b"000A" + _reply_identifier = b"0011" - Response message: StickInitResponse + def __init__( + self, + send_fn: Callable[[PlugwiseRequest, bool], Awaitable[PlugwiseResponse | None]], + ) -> None: + """Initialize StickInitRequest message object.""" + super().__init__(send_fn, None) + self._max_retries = 1 + + async def send( + self, suppress_node_errors: bool = False + ) -> StickInitResponse | None: + """Send request.""" + if self._send_fn is None: + raise MessageError("Send function missing") + result = await self._send_request(suppress_node_errors) + if isinstance(result, StickInitResponse): + return result + if result is None: + return None + raise MessageError( + f"Invalid response message. Received {result.__class__.__name__}, expected StickInitResponse" + ) + + +class NodeImagePrepareRequest(PlugwiseRequest): + """TODO: Some kind of request to prepare node for a firmware image. + + Supported protocols : 1.0, 2.0 + Response message : """ - ID = b"000A" + _identifier = b"000B" + _reply_identifier = b"0003" - def __init__(self): - """Message for that initializes the Stick""" - # init is the only request message that doesn't send MAC address - super().__init__("") + async def send( + self, suppress_node_errors: bool = False + ) -> NodeSpecificResponse | None: + """Send request.""" + result = await self._send_request(suppress_node_errors) + if isinstance(result, NodeSpecificResponse): + return result + if result is None: + return None + raise MessageError( + f"Invalid response message. 
Received {result.__class__.__name__}, expected NodeSpecificResponse" + ) -class NodeImagePrepareRequest(NodeRequest): - """TODO: PWEswImagePrepareRequestV1_0 +class NodeImageValidateRequest(PlugwiseRequest): + """TODO: Some kind of request to validate a firmware image for a node. - Response message: TODO: + Supported protocols : 1.0, 2.0 + Response message : NodeImageValidationResponse """ - ID = b"000B" + _identifier = b"000C" + _reply_identifier = b"0010" + + async def send( + self, suppress_node_errors: bool = False + ) -> NodeImageValidationResponse | None: + """Send request.""" + result = await self._send_request(suppress_node_errors) + if isinstance(result, NodeImageValidationResponse): + return result + if result is None: + return None + raise MessageError( + f"Invalid response message. Received {result.__class__.__name__}, expected NodeImageValidationResponse" + ) -class NodePingRequest(NodeRequest): - """Ping node +class NodePingRequest(PlugwiseRequest): + """Ping node. - Response message: NodePingResponse + Supported protocols : 1.0, 2.0 + Response message : NodePingResponse """ - ID = b"000D" + _identifier = b"000D" + _reply_identifier = b"000E" + def __init__( + self, + send_fn: Callable[[PlugwiseRequest, bool], Awaitable[PlugwiseResponse | None]], + mac: bytes, + retries: int = MAX_RETRIES, + ) -> None: + """Initialize NodePingRequest message object.""" + super().__init__(send_fn, mac) + self._reply_identifier = b"000E" + self._max_retries = retries + + async def send(self, suppress_node_errors: bool = False) -> NodePingResponse | None: + """Send request.""" + result = await self._send_request(suppress_node_errors) + if isinstance(result, NodePingResponse): + return result + if result is None: + return None + raise MessageError( + f"Invalid response message. Received {result.__class__.__name__}, expected NodePingResponse" + ) + + +class NodeImageActivateRequest(PlugwiseRequest): + """TODO: Some kind of request to activate a firmware image for a node. + + Supported protocols : 1.0, 2.0 + Response message : + """ -class CirclePowerUsageRequest(NodeRequest): - """Request current power usage + _identifier = b"000F" + _reply_identifier = b"000E" - Response message: CirclePowerUsageResponse + def __init__( + self, + send_fn: Callable[[PlugwiseRequest, bool], Awaitable[PlugwiseResponse | None]], + mac: bytes, + request_type: int, + reset_delay: int, + ) -> None: + """Initialize NodeImageActivateRequest message object.""" + super().__init__(send_fn, mac) + _type = Int(request_type, 2) + _reset_delay = Int(reset_delay, 2) + self._args += [_type, _reset_delay] + + +class CirclePowerUsageRequest(PlugwiseRequest): + """Request current power usage. + + Supported protocols : 1.0, 2.0, 2.1, 2.3 + Response message : CirclePowerUsageResponse """ - ID = b"0012" + _identifier = b"0012" + _reply_identifier = b"0013" + + async def send( + self, suppress_node_errors: bool = False + ) -> CirclePowerUsageResponse | None: + """Send request.""" + result = await self._send_request(suppress_node_errors) + if isinstance(result, CirclePowerUsageResponse): + return result + if result is None: + return None + raise MessageError( + f"Invalid response message. Received {result.__class__.__name__}, expected CirclePowerUsageResponse" + ) + + +class CircleLogDataRequest(PlugwiseRequest): + """TODO: Some kind of request to get log data from a node. + Only supported at protocol version 1.0 ! 
-class CircleClockSetRequest(NodeRequest): - """Set internal clock of node + + - Response message: [Acknowledge message] + Supported protocols : 1.0 + Response message : CircleLogDataResponse """ - ID = b"0016" + _identifier = b"0014" + _reply_identifier = b"0015" + + def __init__( + self, + send_fn: Callable[[PlugwiseRequest, bool], Awaitable[PlugwiseResponse | None]], + mac: bytes, + start: datetime, + end: datetime, + ) -> None: + """Initialize CircleLogDataRequest message object.""" + super().__init__(send_fn, mac) + passed_days_start = start.day - 1 + month_minutes_start = ( + (passed_days_start * DAY_IN_MINUTES) + + (start.hour * HOUR_IN_MINUTES) + + start.minute + ) + from_abs = DateTime(start.year, start.month, month_minutes_start) + passed_days_end = end.day - 1 + month_minutes_end = ( + (passed_days_end * DAY_IN_MINUTES) + + (end.hour * HOUR_IN_MINUTES) + + end.minute + ) + to_abs = DateTime(end.year, end.month, month_minutes_end) + self._args += [from_abs, to_abs] + + async def send( + self, suppress_node_errors: bool = False + ) -> CircleLogDataResponse | None: + """Send request.""" + result = await self._send_request(suppress_node_errors) + if isinstance(result, CircleLogDataResponse): + return result + if result is None: + return None + raise MessageError( + f"Invalid response message. Received {result.__class__.__name__}, expected CircleLogDataResponse" + ) + + +class CircleClockSetRequest(PlugwiseRequest): + """Set internal clock of node and flash address. + + reset=True, will reset all locally stored energy logs + + Supported protocols : 1.0, 2.0 + Response message : NodeResponse + """ + + _identifier = b"0016" + _reply_identifier = b"0000" + + # pylint: disable=too-many-arguments + def __init__( + self, + send_fn: Callable[[PlugwiseRequest, bool], Awaitable[PlugwiseResponse | None]], + mac: bytes, + dt: datetime, + protocol_version: float, + reset: bool = False, + ) -> None: + """Initialize CircleLogDataRequest message object.""" + super().__init__(send_fn, mac) + self.priority = Priority.HIGH + if protocol_version < 2.0: + # FIXME: Define "absoluteHour" variable + raise MessageError("Unsupported version of CircleClockSetRequest") - def __init__(self, mac, dt, reset=False): - super().__init__(mac) passed_days = dt.day - 1 - month_minutes = (passed_days * 24 * 60) + (dt.hour * 60) + dt.minute + month_minutes = ( + (passed_days * DAY_IN_MINUTES) + (dt.hour * HOUR_IN_MINUTES) + dt.minute + ) this_date = DateTime(dt.year, dt.month, month_minutes) this_time = Time(dt.hour, dt.minute, dt.second) day_of_week = Int(dt.weekday(), 2) - # FIXME: use LogAddr instead if reset: - log_buf_addr = String("00044000", 8) + self._args += [ + this_date, + LogAddr(LOGADDR_OFFSET, 8, False), + this_time, + day_of_week, + ] else: - log_buf_addr = String("FFFFFFFF", 8) - self.args += [this_date, log_buf_addr, this_time, day_of_week] + self._args += [this_date, String("FFFFFFFF", 8), this_time, day_of_week] + async def send(self, suppress_node_errors: bool = False) -> NodeResponse | None: + """Send request.""" + result = await self._send_request(suppress_node_errors) + if isinstance(result, NodeResponse): + return result + if result is None: + return None + raise MessageError( + f"Invalid response message. Received {result.__class__.__name__}, expected NodeResponse" + ) -class CircleSwitchRelayRequest(NodeRequest): - """switches relay on/off - Response message: NodeAckLargeResponse +class CircleRelaySwitchRequest(PlugwiseRequest): + """Request to switches relay on/off. 
+ + Supported protocols : 1.0, 2.0 + Response message : NodeResponse """ - ID = b"0017" + _identifier = b"0017" + _reply_identifier = b"0000" - def __init__(self, mac, on): - super().__init__(mac) + def __init__( + self, + send_fn: Callable[[PlugwiseRequest, bool], Awaitable[PlugwiseResponse | None]], + mac: bytes, + on: bool, + ) -> None: + """Initialize CircleRelaySwitchRequest message object.""" + super().__init__(send_fn, mac) + self.priority = Priority.HIGH val = 1 if on else 0 - self.args.append(Int(val, length=2)) - + self._args.append(Int(val, length=2)) -class CirclePlusScanRequest(NodeRequest): - """Get all linked Circle plugs from Circle+ - a Plugwise network can have 64 devices the node ID value has a range from 0 to 63 + async def send(self, suppress_node_errors: bool = False) -> NodeResponse | None: + """Send request.""" + result = await self._send_request(suppress_node_errors) + if isinstance(result, NodeResponse): + return result + if result is None: + return None + raise MessageError( + f"Invalid response message. Received {result.__class__.__name__}, expected NodeResponse" + ) - Response message: CirclePlusScanResponse - """ - - ID = b"0018" - def __init__(self, mac, node_address): - super().__init__(mac) - self.args.append(Int(node_address, length=2)) - self.node_address = node_address +class CirclePlusScanRequest(PlugwiseRequest): + """Request all linked Circle plugs from Circle+. + A Plugwise network (Circle+) can have 64 devices the node ID value + has a range from 0 to 63 -class NodeRemoveRequest(NodeRequest): - """Request node to be removed from Plugwise network by - removing it from memory of Circle+ node. - - Response message: NodeRemoveResponse + Supported protocols : 1.0, 2.0 + Response message : CirclePlusScanResponse """ - ID = b"001C" - - def __init__(self, mac_circle_plus, mac_to_unjoined): - super().__init__(mac_circle_plus) - self.args.append(String(mac_to_unjoined, length=16)) + _identifier = b"0018" + _reply_identifier = b"0019" - -class NodeInfoRequest(NodeRequest): - """Request status info of node - - Response message: NodeInfoResponse + def __init__( + self, + send_fn: Callable[[PlugwiseRequest, bool], Awaitable[PlugwiseResponse | None]], + mac: bytes, + network_address: int, + ) -> None: + """Initialize CirclePlusScanRequest message object.""" + super().__init__(send_fn, mac) + self._args.append(Int(network_address, length=2)) + self.network_address = network_address + + def __repr__(self) -> str: + """Convert request into writable str.""" + return f"{super().__repr__()[:-1]}, network_address={self.network_address})" + + async def send( + self, suppress_node_errors: bool = False + ) -> CirclePlusScanResponse | None: + """Send request.""" + result = await self._send_request(suppress_node_errors) + if isinstance(result, CirclePlusScanResponse): + return result + if result is None: + return None + raise MessageError( + f"Invalid response message. Received {result.__class__.__name__}, expected CirclePlusScanResponse" + ) + + +class NodeRemoveRequest(PlugwiseRequest): + """Request node to be removed from Plugwise network by removing it from memory of Circle+ node. 
+ + Supported protocols : 1.0, 2.0 + Response message : NodeRemoveResponse """ - ID = b"0023" + _identifier = b"001C" + _reply_identifier = b"001D" + def __init__( + self, + send_fn: Callable[[PlugwiseRequest, bool], Awaitable[PlugwiseResponse | None]], + mac_circle_plus: bytes, + mac_to_unjoined: str, + ) -> None: + """Initialize NodeRemoveRequest message object.""" + super().__init__(send_fn, mac_circle_plus) + self._args.append(String(mac_to_unjoined, length=16)) + + async def send( + self, suppress_node_errors: bool = False + ) -> NodeRemoveResponse | None: + """Send request.""" + result = await self._send_request(suppress_node_errors) + if isinstance(result, NodeRemoveResponse): + return result + if result is None: + return None + raise MessageError( + f"Invalid response message. Received {result.__class__.__name__}, expected NodeRemoveResponse" + ) + + +class NodeInfoRequest(PlugwiseRequest): + """Request status info of node. + + Supported protocols : 1.0, 2.0, 2.3 + Response message : NodeInfoResponse + """ -class CircleCalibrationRequest(NodeRequest): - """Request power calibration settings of node + _identifier = b"0023" + _reply_identifier = b"0024" - Response message: CircleCalibrationResponse + def __init__( + self, + send_fn: Callable[[PlugwiseRequest, bool], Awaitable[PlugwiseResponse | None]], + mac: bytes, + retries: int = MAX_RETRIES, + ) -> None: + """Initialize NodeInfoRequest message object.""" + super().__init__(send_fn, mac) + self._max_retries = retries + + async def send(self, suppress_node_errors: bool = False) -> NodeInfoResponse | None: + """Send request.""" + result = await self._send_request(suppress_node_errors) + if isinstance(result, NodeInfoResponse): + return result + if result is None: + return None + raise MessageError( + f"Invalid response message. Received {result.__class__.__name__}, expected NodeInfoResponse" + ) + + +class EnergyCalibrationRequest(PlugwiseRequest): + """Request power calibration settings of node. + + Supported protocols : 1.0, 2.0 + Response message : EnergyCalibrationResponse """ - ID = b"0026" + _identifier = b"0026" + _reply_identifier = b"0027" + + async def send( + self, suppress_node_errors: bool = False + ) -> EnergyCalibrationResponse | None: + """Send request.""" + result = await self._send_request(suppress_node_errors) + if isinstance(result, EnergyCalibrationResponse): + return result + if result is None: + return None + raise MessageError( + f"Invalid response message. Received {result.__class__.__name__}, expected EnergyCalibrationResponse" + ) -class CirclePlusRealTimeClockSetRequest(NodeRequest): - """Set real time clock of CirclePlus +class CirclePlusRealTimeClockSetRequest(PlugwiseRequest): + """Set real time clock of Circle+. 
- Response message: [Acknowledge message] + Supported protocols : 1.0, 2.0 + Response message : NodeResponse """ - ID = b"0028" + _identifier = b"0028" + _reply_identifier = b"0000" - def __init__(self, mac, dt): - super().__init__(mac) + def __init__( + self, + send_fn: Callable[[PlugwiseRequest, bool], Awaitable[PlugwiseResponse | None]], + mac: bytes, + dt: datetime, + ): + """Initialize CirclePlusRealTimeClockSetRequest message object.""" + super().__init__(send_fn, mac) + self.priority = Priority.HIGH this_time = RealClockTime(dt.hour, dt.minute, dt.second) day_of_week = Int(dt.weekday(), 2) this_date = RealClockDate(dt.day, dt.month, dt.year) - self.args += [this_time, day_of_week, this_date] + self._args += [this_time, day_of_week, this_date] + async def send(self, suppress_node_errors: bool = False) -> NodeResponse | None: + """Send request.""" + result = await self._send_request(suppress_node_errors) + if isinstance(result, NodeResponse): + return result + if result is None: + return None + raise MessageError( + f"Invalid response message. Received {result.__class__.__name__}, expected NodeResponse" + ) -class CirclePlusRealTimeClockGetRequest(NodeRequest): - """Request current real time clock of CirclePlus - Response message: CirclePlusRealTimeClockResponse +class CirclePlusRealTimeClockGetRequest(PlugwiseRequest): + """Request current real time clock of CirclePlus. + + Supported protocols : 1.0, 2.0 + Response message : CirclePlusRealTimeClockResponse """ - ID = b"0029" + _identifier = b"0029" + _reply_identifier = b"003A" + + async def send( + self, suppress_node_errors: bool = False + ) -> CirclePlusRealTimeClockResponse | None: + """Send request.""" + result = await self._send_request(suppress_node_errors) + if isinstance(result, CirclePlusRealTimeClockResponse): + return result + if result is None: + return None + raise MessageError( + f"Invalid response message. Received {result.__class__.__name__}, expected CirclePlusRealTimeClockResponse" + ) + + +# TODO : Insert +# +# ID = b"003B" = Get Schedule request +# ID = b"003C" = Set Schedule request -class CircleClockGetRequest(NodeRequest): - """Request current internal clock of node +class CircleClockGetRequest(PlugwiseRequest): + """Request current internal clock of node. - Response message: CircleClockResponse + Supported protocols : 1.0, 2.0 + Response message : CircleClockResponse """ - ID = b"003E" + _identifier = b"003E" + _reply_identifier = b"003F" + async def send( + self, suppress_node_errors: bool = False + ) -> CircleClockResponse | None: + """Send request.""" + result = await self._send_request(suppress_node_errors) + if isinstance(result, CircleClockResponse): + return result + if result is None: + return None + raise MessageError( + f"Invalid response message. Received {result.__class__.__name__}, expected CircleClockResponse" + ) -class CircleEnableScheduleRequest(NodeRequest): - """Request to switch Schedule on or off - Response message: TODO: +class CircleActivateScheduleRequest(PlugwiseRequest): + """Request to switch Schedule on or off. 
+ + Supported protocols : 1.0, 2.0 + Response message : TODO: """ - ID = b"0040" + _identifier = b"0040" + _reply_identifier = b"0000" - def __init__(self, mac, on): - super().__init__(mac) + def __init__( + self, + send_fn: Callable[[PlugwiseRequest, bool], Awaitable[PlugwiseResponse | None]], + mac: bytes, + on: bool, + ) -> None: + """Initialize CircleActivateScheduleRequest message object.""" + super().__init__(send_fn, mac) val = 1 if on else 0 - self.args.append(Int(val, length=2)) + self._args.append(Int(val, length=2)) # the second parameter is always 0x01 - self.args.append(Int(1, length=2)) + self._args.append(Int(1, length=2)) -class NodeAddToGroupRequest(NodeRequest): - """Add node to group +class NodeAddToGroupRequest(PlugwiseRequest): + """Add node to group. Response message: TODO: """ - ID = b"0045" + _identifier = b"0045" + _reply_identifier = b"0000" - def __init__(self, mac, group_mac, task_id, port_mask): - super().__init__(mac) + # pylint: disable=too-many-arguments + def __init__( + self, + send_fn: Callable[[PlugwiseRequest, bool], Awaitable[PlugwiseResponse | None]], + mac: bytes, + group_mac: str, + task_id: str, + port_mask: str, + ) -> None: + """Initialize NodeAddToGroupRequest message object.""" + super().__init__(send_fn, mac) group_mac_val = String(group_mac, length=16) task_id_val = String(task_id, length=16) port_mask_val = String(port_mask, length=16) - self.args += [group_mac_val, task_id_val, port_mask_val] + self._args += [group_mac_val, task_id_val, port_mask_val] + async def send(self, suppress_node_errors: bool = False) -> NodeResponse | None: + """Send request.""" + result = await self._send_request(suppress_node_errors) + if isinstance(result, NodeResponse): + return result + if result is None: + return None + raise MessageError( + f"Invalid response message. Received {result.__class__.__name__}, expected NodeResponse" + ) -class NodeRemoveFromGroupRequest(NodeRequest): - """Remove node from group + +class NodeRemoveFromGroupRequest(PlugwiseRequest): + """Remove node from group. Response message: TODO: """ - ID = b"0046" + _identifier = b"0046" + _reply_identifier = b"0000" - def __init__(self, mac, group_mac): - super().__init__(mac) + def __init__( + self, + send_fn: Callable[[PlugwiseRequest, bool], Awaitable[PlugwiseResponse | None]], + mac: bytes, + group_mac: str, + ) -> None: + """Initialize NodeRemoveFromGroupRequest message object.""" + super().__init__(send_fn, mac) group_mac_val = String(group_mac, length=16) - self.args += [group_mac_val] + self._args += [group_mac_val] -class NodeBroadcastGroupSwitchRequest(NodeRequest): - """Broadcast to group to switch +class NodeBroadcastGroupSwitchRequest(PlugwiseRequest): + """Broadcast to group to switch. Response message: TODO: """ - ID = b"0047" + _identifier = b"0047" + _reply_identifier = b"0000" - def __init__(self, group_mac, switch_state: bool): - super().__init__(group_mac) + def __init__( + self, + send_fn: Callable[[PlugwiseRequest, bool], Awaitable[PlugwiseResponse | None]], + group_mac: bytes, + switch_state: bool, + ) -> None: + """Initialize NodeBroadcastGroupSwitchRequest message object.""" + super().__init__(send_fn, group_mac) val = 1 if switch_state else 0 - self.args.append(Int(val, length=2)) + self._args.append(Int(val, length=2)) + + +class CircleEnergyLogsRequest(PlugwiseRequest): + """Request energy usage counters stored a given memory address. 
+ + Response message: CircleEnergyLogsResponse + """ + + _identifier = b"0048" + _reply_identifier = b"0049" + def __init__( + self, + send_fn: Callable[[PlugwiseRequest, bool], Awaitable[PlugwiseResponse | None]], + mac: bytes, + log_address: int, + ) -> None: + """Initialize CircleEnergyLogsRequest message object.""" + super().__init__(send_fn, mac) + self._log_address = log_address + self.priority = Priority.LOW + self._args.append(LogAddr(log_address, 8)) + + def __repr__(self) -> str: + """Convert request into writable str.""" + return f"{super().__repr__()[:-1]}, log_address={self._log_address})" + + async def send( + self, suppress_node_errors: bool = False + ) -> CircleEnergyLogsResponse | None: + """Send request.""" + result = await self._send_request(suppress_node_errors) + if isinstance(result, CircleEnergyLogsResponse): + return result + if result is None: + return None + raise MessageError( + f"Invalid response message. Received {result.__class__.__name__}, expected CircleEnergyLogsResponse" + ) + + +class CircleHandlesOffRequest(PlugwiseRequest): + """?PWSetHandlesOffRequestV1_0. + + Response message: TODO + """ + + _identifier = b"004D" + _reply_identifier = b"0000" -class CircleEnergyCountersRequest(NodeRequest): - """Request energy usage counters storaged a given memory address - Response message: CircleEnergyCountersResponse +class CircleHandlesOnRequest(PlugwiseRequest): + """?PWSetHandlesOnRequestV1_0. + + Response message: ? """ - ID = b"0048" + _identifier = b"004E" + _reply_identifier = b"0000" - def __init__(self, mac, log_address): - super().__init__(mac) - self.args.append(LogAddr(log_address, 8)) +class NodeSleepConfigRequest(PlugwiseRequest): + """Configure timers for SED nodes to minimize battery usage. -class NodeSleepConfigRequest(NodeRequest): - """Configure timers for SED nodes to minimize battery usage + Description: + Response message: NodeResponse with SLEEP_SET - stay_active : Duration in seconds the SED will be awake for receiving commands - sleep_for : Duration in minutes the SED will be in sleeping mode and not able to respond any command - maintenance_interval : Interval in minutes the node will wake up and able to receive commands - clock_sync : Enable/disable clock sync - clock_interval : Duration in minutes the node synchronize its clock + Args: + send_fn: Send function + mac: MAC address of the node + awake_duration: Duration in seconds the SED will be awake for receiving commands + sleep_for: Duration in minutes the SED will be in sleeping mode and not able to respond any command + maintenance_interval: Interval in minutes the node will wake up and able to receive commands + sync_clock: Enable/disable clock sync + clock_interval: Duration in minutes the node synchronize its clock - Response message: Ack message with SLEEP_SET """ - ID = b"0050" + _identifier = b"0050" + _reply_identifier = b"0000" + # pylint: disable=too-many-arguments def __init__( self, - mac, - stay_active: int, + send_fn: Callable[[PlugwiseRequest, bool], Awaitable[PlugwiseResponse | None]], + mac: bytes, + awake_duration: int, maintenance_interval: int, - sleep_for: int, + sleep_duration: int, sync_clock: bool, clock_interval: int, ): - super().__init__(mac) - - stay_active_val = Int(stay_active, length=2) - sleep_for_val = Int(sleep_for, length=4) - maintenance_interval_val = Int(maintenance_interval, length=4) + """Initialize NodeSleepConfigRequest message object.""" + super().__init__(send_fn, mac) + self.awake_duration_val = Int(awake_duration, length=2) + 
self.sleep_duration_val = Int(sleep_duration, length=4) + self.maintenance_interval_val = Int(maintenance_interval, length=4) val = 1 if sync_clock else 0 - clock_sync_val = Int(val, length=2) - clock_interval_val = Int(clock_interval, length=4) - self.args += [ - stay_active_val, - maintenance_interval_val, - sleep_for_val, - clock_sync_val, - clock_interval_val, + self.clock_sync_val = Int(val, length=2) + self.clock_interval_val = Int(clock_interval, length=4) + self._args += [ + self.awake_duration_val, + self.maintenance_interval_val, + self.sleep_duration_val, + self.clock_sync_val, + self.clock_interval_val, ] + async def send(self, suppress_node_errors: bool = False) -> NodeResponse | None: + """Send request.""" + result = await self._send_request(suppress_node_errors) + _LOGGER.warning("NodeSleepConfigRequest result: %s", result) + if isinstance(result, NodeResponse): + return result + if result is None: + return None + raise MessageError( + f"Invalid response message. Received {result.__class__.__name__}, expected NodeResponse" + ) + + def __repr__(self) -> str: + """Convert request into writable str.""" + return f"{super().__repr__()[:-1]}, awake_duration={self.awake_duration_val.value}, maintenance_interval={self.maintenance_interval_val.value}, sleep_duration={self.sleep_duration_val.value}, clock_interval={self.clock_interval_val.value}, clock_sync={self.clock_sync_val.value})" + -class NodeSelfRemoveRequest(NodeRequest): - """ +class NodeSelfRemoveRequest(PlugwiseRequest): + """TODO: Remove node?. + + - """ - ID = b"0051" + _identifier = b"0051" + _reply_identifier = b"0000" -class NodeMeasureIntervalRequest(NodeRequest): - """Configure the logging interval of power measurement in minutes +class CircleMeasureIntervalRequest(PlugwiseRequest): + """Configure the logging interval of energy measurement in minutes. - Response message: TODO: + FIXME: Make sure production interval is a multiple of consumption !! + + Response message: Ack message with ??? TODO: """ - ID = b"0057" + _identifier = b"0057" + _reply_identifier = b"0000" + + def __init__( + self, + send_fn: Callable[[PlugwiseRequest, bool], Awaitable[PlugwiseResponse | None]], + mac: bytes, + consumption: int, + production: int, + ): + """Initialize CircleMeasureIntervalRequest message object.""" + super().__init__(send_fn, mac) + self._args.append(Int(consumption, length=4)) + self._args.append(Int(production, length=4)) - def __init__(self, mac, usage, production): - super().__init__(mac) - self.args.append(Int(usage, length=4)) - self.args.append(Int(production, length=4)) +class NodeClearGroupMacRequest(PlugwiseRequest): + """TODO: usage?. -class NodeClearGroupMacRequest(NodeRequest): - """TODO: - Response message: ???? + Response message: TODO + """ - ID = b"0058" + _identifier = b"0058" + _reply_identifier = b"0000" - def __init__(self, mac, taskId): - super().__init__(mac) - self.args.append(Int(taskId, length=2)) + def __init__( + self, + send_fn: Callable[[PlugwiseRequest, bool], Awaitable[PlugwiseResponse | None]], + mac: bytes, + taskId: int, + ) -> None: + """Initialize NodeClearGroupMacRequest message object.""" + super().__init__(send_fn, mac) + self._args.append(Int(taskId, length=2)) -class CircleSetScheduleValueRequest(NodeRequest): - """Send chunk of On/Off/StandbyKiller Schedule to Circle(+) +class CircleSetScheduleValueRequest(PlugwiseRequest): + """Send chunk of On/Off/StandbyKiller Schedule to Circle(+). 
Response message: TODO: """ - ID = b"0059" + _identifier = b"0059" + _reply_identifier = b"0000" - def __init__(self, mac, val): - super().__init__(mac) - self.args.append(SInt(val, length=4)) + def __init__( + self, + send_fn: Callable[[PlugwiseRequest, bool], Awaitable[PlugwiseResponse | None]], + mac: bytes, + val: int, + ) -> None: + """Initialize CircleSetScheduleValueRequest message object.""" + super().__init__(send_fn, mac) + self._args.append(SInt(val, length=4)) -class NodeFeaturesRequest(NodeRequest): - """Request feature set node supports +class NodeFeaturesRequest(PlugwiseRequest): + """Request feature set node supports. Response message: NodeFeaturesResponse """ - ID = b"005F" + _identifier = b"005F" + _reply_identifier = b"0060" - -class ScanConfigureRequest(NodeRequest): - """Configure a Scan node - - reset_timer : Delay in minutes when signal is send when no motion is detected - sensitivity : Sensitivity of Motion sensor (High, Medium, Off) - light : Daylight override to only report motion when lightlevel is below calibrated level - - Response message: [Acknowledge message] + def __init__( + self, + send_fn: Callable[[PlugwiseRequest, bool], Awaitable[PlugwiseResponse | None]], + mac: bytes, + val: int, + ) -> None: + """Initialize NodeFeaturesRequest message object.""" + super().__init__(send_fn, mac) + self._args.append(SInt(val, length=4)) + + async def send( + self, suppress_node_errors: bool = False + ) -> NodeFeaturesResponse | None: + """Send request.""" + result = await self._send_request(suppress_node_errors) + if isinstance(result, NodeFeaturesResponse): + return result + if result is None: + return None + raise MessageError( + f"Invalid response message. Received {result.__class__.__name__}, expected NodeFeaturesResponse" + ) + + +class ScanConfigureRequest(PlugwiseRequest): + """Configure a Scan node. + + reset_timer : Delay in minutes when signal is send + when no motion is detected. Minimum 1, max 255 + sensitivity : Sensitivity of Motion sensor + (High, Medium, Off) + light : Daylight override to only report motion + when light level is below calibrated level + + Response message: NodeAckResponse """ - ID = b"0101" - - def __init__(self, mac, reset_timer: int, sensitivity: int, light: bool): - super().__init__(mac) + _identifier = b"0101" + _reply_identifier = b"0100" + # pylint: disable=too-many-arguments + def __init__( + self, + send_fn: Callable[[PlugwiseRequest, bool], Awaitable[PlugwiseResponse | None]], + mac: bytes, + reset_timer: int, + sensitivity: int, + light: bool, + ): + """Initialize ScanConfigureRequest message object.""" + super().__init__(send_fn, mac) reset_timer_value = Int(reset_timer, length=2) # Sensitivity: HIGH(0x14), MEDIUM(0x1E), OFF(0xFF) sensitivity_value = Int(sensitivity, length=2) light_temp = 1 if light else 0 light_value = Int(light_temp, length=2) - self.args += [ + self._args += [ sensitivity_value, light_value, reset_timer_value, ] + async def send(self, suppress_node_errors: bool = False) -> NodeAckResponse | None: + """Send request.""" + result = await self._send_request(suppress_node_errors) + if isinstance(result, NodeAckResponse): + return result + if result is None: + return None + raise MessageError( + f"Invalid response message. Received {result.__class__.__name__}, expected NodeAckResponse" + ) + -class ScanLightCalibrateRequest(NodeRequest): - """Calibrate light sensitivity +class ScanLightCalibrateRequest(PlugwiseRequest): + """Calibrate light sensitivity. 
- Response message: [Acknowledge message] + Response message: NodeAckResponse """ - ID = b"0102" + _identifier = b"0102" + _reply_identifier = b"0100" + async def send(self, suppress_node_errors: bool = False) -> NodeAckResponse | None: + """Send request.""" + result = await self._send_request(suppress_node_errors) + if isinstance(result, NodeAckResponse): + return result + if result is None: + return None + raise MessageError( + f"Invalid response message. Received {result.__class__.__name__}, expected NodeAckResponse" + ) -class SenseReportIntervalRequest(NodeRequest): + +class SenseReportIntervalRequest(PlugwiseRequest): """Sets the Sense temperature and humidity measurement report interval in minutes. + Based on this interval, periodically a 'SenseReportResponse' message is sent by the Sense node - Response message: [Acknowledge message] + Response message: NodeAckResponse """ - ID = b"0102" + _identifier = b"0103" + _reply_identifier = b"0100" - def __init__(self, mac, interval): - super().__init__(mac) - self.args.append(Int(interval, length=2)) - - -class CircleInitialRelaisStateRequest(NodeRequest): - """Get or set initial Relais state - - Response message: CircleInitialRelaisStateResponse + def __init__( + self, + send_fn: Callable[[PlugwiseRequest, bool], Awaitable[PlugwiseResponse | None]], + mac: bytes, + interval: int, + ): + """Initialize SenseReportIntervalRequest message object.""" + super().__init__(send_fn, mac) + self._args.append(Int(interval, length=2)) + + async def send(self, suppress_node_errors: bool = False) -> NodeAckResponse | None: + """Send request.""" + result = await self._send_request(suppress_node_errors) + if isinstance(result, NodeAckResponse): + return result + if result is None: + return None + raise MessageError( + f"Invalid response message. Received {result.__class__.__name__}, expected NodeAckResponse" + ) + + +class CircleRelayInitStateRequest(PlugwiseRequest): + """Get or set initial relay state after power-up of Circle. + + Supported protocols : 2.6 + Response message : CircleRelayInitStateResponse + """ + + _identifier = b"0138" + _reply_identifier = b"0139" - def __init__(self, mac, configure: bool, relais_state: bool): - super().__init__(mac) - set_or_get = Int(1 if configure else 0, length=2) - relais = Int(1 if relais_state else 0, length=2) - self.args += [set_or_get, relais] + def __init__( + self, + send_fn: Callable[[PlugwiseRequest, bool], Awaitable[PlugwiseResponse | None]], + mac: bytes, + configure: bool, + relay_state: bool, + ) -> None: + """Initialize CircleRelayInitStateRequest message object.""" + super().__init__(send_fn, mac) + self.priority = Priority.LOW + self.set_or_get = Int(1 if configure else 0, length=2) + self.relay = Int(1 if relay_state else 0, length=2) + self._args += [self.set_or_get, self.relay] + + async def send( + self, suppress_node_errors: bool = False + ) -> CircleRelayInitStateResponse | None: + """Send request.""" + result = await self._send_request(suppress_node_errors) + if isinstance(result, CircleRelayInitStateResponse): + return result + if result is None: + return None + raise MessageError( + f"Invalid response message. 
Received {result.__class__.__name__}, expected CircleRelayInitStateResponse" + ) diff --git a/plugwise_usb/messages/responses.py b/plugwise_usb/messages/responses.py index a3bae1f3a..50dedbccc 100644 --- a/plugwise_usb/messages/responses.py +++ b/plugwise_usb/messages/responses.py @@ -1,15 +1,17 @@ """All known response messages to be received from plugwise devices.""" -from datetime import datetime -from ..constants import MESSAGE_FOOTER, MESSAGE_HEADER, MESSAGE_LARGE, MESSAGE_SMALL -from ..exceptions import ( - InvalidMessageChecksum, - InvalidMessageFooter, - InvalidMessageHeader, - InvalidMessageLength, -) -from ..messages import PlugwiseMessage -from ..util import ( +from __future__ import annotations + +from datetime import datetime +from enum import Enum +from typing import Any, Final + +from ..api import NodeType +from ..constants import MESSAGE_FOOTER, MESSAGE_HEADER +from ..exceptions import MessageError +from . import PlugwiseMessage, Priority +from .properties import ( + Bytes, DateTime, Float, Int, @@ -21,386 +23,736 @@ UnixTimestamp, ) +NODE_JOIN_ID: Final = b"0006" +NODE_AWAKE_RESPONSE_ID: Final = b"004F" +NODE_REJOIN_ID: Final = b"0061" +NODE_SWITCH_GROUP_ID: Final = b"0056" +SENSE_REPORT_ID: Final = b"0105" + +JOIN_AVAILABLE_SEQ_ID: Final = b"FFFC" +REJOIN_RESPONSE_SEQ_ID: Final = b"FFFD" +AWAKE_RESPONSE_SEQ_ID: Final = b"FFFE" +SWITCH_GROUP_RESPONSE_SEQ_ID: Final = b"FFFF" + +BROADCAST_IDS: Final = ( + JOIN_AVAILABLE_SEQ_ID, + REJOIN_RESPONSE_SEQ_ID, + AWAKE_RESPONSE_SEQ_ID, + SWITCH_GROUP_RESPONSE_SEQ_ID, +) + + +class StickResponseType(bytes, Enum): + """Response message types for stick.""" + + # Minimal value = b"00C0", maximum value = b"00F3" + # Below the currently known values: + + ACCEPT = b"00C1" + FAILED = b"00C2" + TIMEOUT = b"00E1" + + +class NodeResponseType(bytes, Enum): + """Response types of a 'NodeResponse' reply message.""" + + CLOCK_ACCEPTED = b"00D7" + JOIN_ACCEPTED = b"00D9" + RELAY_SWITCHED_OFF = b"00DE" + RELAY_SWITCHED_ON = b"00D8" + RELAY_SWITCH_FAILED = b"00E2" + SED_CONFIG_ACCEPTED = b"00F6" + REAL_TIME_CLOCK_ACCEPTED = b"00DF" + REAL_TIME_CLOCK_FAILED = b"00E7" + + # TODO: Validate these response types + SED_CONFIG_FAILED = b"00F7" + POWER_LOG_INTERVAL_ACCEPTED = b"00F8" + POWER_CALIBRATION_ACCEPTED = b"00DA" + CIRCLE_PLUS = b"00DD" + + +class NodeAckResponseType(bytes, Enum): + """Response types of a 'NodeAckResponse' reply message.""" -class NodeResponse(PlugwiseMessage): + SCAN_CONFIG_ACCEPTED = b"00BE" + SCAN_CONFIG_FAILED = b"00BF" + SCAN_LIGHT_CALIBRATION_ACCEPTED = b"00BD" + SENSE_INTERVAL_ACCEPTED = b"00B3" + SENSE_INTERVAL_FAILED = b"00B4" + SENSE_BOUNDARIES_ACCEPTED = b"00B5" + SENSE_BOUNDARIES_FAILED = b"00B6" + + +class NodeAwakeResponseType(int, Enum): + """Response types of a 'NodeAwakeResponse' reply message.""" + + MAINTENANCE = 0 # SED awake for maintenance + FIRST = 1 # SED awake for the first time + STARTUP = 2 # SED awake after restart, e.g. 
after reinserting a battery + STATE = 3 # SED awake to report state (Motion / Temperature / Humidity) + UNKNOWN = 4 + BUTTON = 5 # SED awake due to button press + + +class PlugwiseResponse(PlugwiseMessage): """Base class for response messages received by USB-Stick.""" - def __init__(self, format_size=None): + def __init__( + self, + identifier: bytes, + decode_ack: bool = False, + decode_mac: bool = True, + ) -> None: + """Initialize a response message.""" super().__init__() - self.format_size = format_size - self.params = [] - self.timestamp = None - self.seq_id = None - self.msg_id = None - self.ack_id = None - if self.format_size == MESSAGE_SMALL: - self.len_correction = -12 - elif self.format_size == MESSAGE_LARGE: - self.len_correction = 4 - else: - self.len_correction = 0 - - def deserialize(self, response): - self.timestamp = datetime.now() + self._identifier = identifier + self._mac: bytes | None = None + self._ack_id: bytes | None = None + self._decode_ack = decode_ack + self._decode_mac = decode_mac + self._params: list[Any] = [] + self._seq_id: bytes = b"FFFF" + self._retries = 0 + + def __repr__(self) -> str: + """Convert request into writable str.""" + return f"{self.__class__.__name__} (mac={self.mac_decoded}, seq_id={self._seq_id!r}, retries={self._retries})" + + @property + def retries(self) -> int: + """Number of retries for processing.""" + return self._retries + + @retries.setter + def retries(self, retries: int) -> None: + """Set number of retries for processing.""" + self._retries = retries + + @property + def ack_id(self) -> bytes | None: + """Return the acknowledge id.""" + return self._ack_id + + def deserialize(self, response: bytes, has_footer: bool = True) -> None: + """Deserialize bytes to actual message properties.""" + # Header if response[:4] != MESSAGE_HEADER: - raise InvalidMessageHeader( - f"Invalid message header {str(response[:4])} for {self.__class__.__name__}" + raise MessageError( + "Invalid message header " + + str(response[:4]) + + " for " + + self.__class__.__name__ ) - if response[-2:] != MESSAGE_FOOTER: - raise InvalidMessageFooter( - f"Invalid message footer {str(response[-2:])} for {self.__class__.__name__}" + response = response[4:] + + # Footer + if has_footer: + if response[-2:] != MESSAGE_FOOTER: + raise MessageError( + "Invalid message footer " + + str(response[-2:]) + + " for " + + self.__class__.__name__ + ) + response = response[:-2] + + # Checksum + if (check := self.calculate_checksum(response[:-4])) != response[-4:]: + raise MessageError( + f"Invalid checksum for {self.__class__.__name__}, " + + f"expected {check!r} got " + + str(response[-4:]), ) - _calculated_checksum = self.calculate_checksum(response[4:-6]) - _message_checksum = response[-6:-2] - if _calculated_checksum != _message_checksum: - raise InvalidMessageChecksum( - f"Invalid checksum for {self.__class__.__name__}, expected {str(_calculated_checksum)} got {str(_message_checksum)}", + response = response[:-4] + + # ID and Sequence number + if self._identifier != response[:4]: + raise MessageError( + "Invalid message identifier received " + + f"expected {self._identifier!r} " + + f"got {response[:4]!r}" ) + self._seq_id = response[4:8] + response = response[8:] + + # Message data if len(response) != len(self): - raise InvalidMessageLength( - f"Invalid message length received for {self.__class__.__name__}, expected {str(len(self))} bytes got {str(len(response))}" + raise MessageError( + "Invalid message length received for " + + f"{self.__class__.__name__}, expected " + 
f"{len(self)} bytes got {len(response)}" ) - - self.msg_id = response[4:8] - self.seq_id = response[8:12] - response = response[12:] - if self.format_size in [MESSAGE_SMALL, MESSAGE_LARGE]: - self.ack_id = response[:4] + if self._decode_ack: + self._ack_id = response[:4] response = response[4:] - if self.format_size != MESSAGE_SMALL: - self.mac = response[:16] + if self._decode_mac: + self._mac = response[:16] response = response[16:] - response = self._parse_params(response) - - _args = b"".join(a.serialize() for a in self.args) - msg = self.ID - if self.mac != "": - msg += self.mac - msg += _args - - def _parse_params(self, response): - for param in self.params: + if len(response) > 0: + try: + response = self._parse_params(response) + except ValueError as ve: + raise MessageError( + "Failed to parse data " + + str(response) + + "for message " + + self.__class__.__name__ + ) from ve + + def _parse_params(self, response: bytes) -> bytes: + for param in self._params: my_val = response[: len(param)] param.deserialize(my_val) response = response[len(my_val) :] + return response - def __len__(self): - arglen = sum(len(x) for x in self.params) - return 34 + arglen + self.len_correction + def __len__(self) -> int: + """Return the size of response message.""" + offset_ack = 4 if self._decode_ack else 0 + offset_mac = 16 if self._decode_mac else 0 + return offset_ack + offset_mac + sum(len(x) for x in self._params) -class NodeAckSmallResponse(NodeResponse): - """Acknowledge message without source MAC +class StickResponse(PlugwiseResponse): + """Response message from USB-Stick. - Response to: Any message + Response to: Any message request """ - ID = b"0000" - - def __init__(self): - super().__init__(MESSAGE_SMALL) + def __init__(self) -> None: + """Initialize StickResponse message object.""" + super().__init__(b"0000", decode_ack=True, decode_mac=False) + self.priority = Priority.HIGH + + def __repr__(self) -> str: + """Convert request into writable str.""" + if self.ack_id is None: + return f"StickResponse (seq_id={self._seq_id!r}, retries={self._retries}, ack=UNKNOWN)" + return f"StickResponse (seq_id={self._seq_id!r}, retries={self._retries}, ack={StickResponseType(self.ack_id).name})" + + @property + def response_type(self) -> StickResponseType: + """Return acknowledge response type.""" + if self.ack_id is None: + raise MessageError("Acknowledge ID is unknown") + return StickResponseType(self.ack_id) + + +class NodeResponse(PlugwiseResponse): + """Report status from node to a specific request. 
+ + Supported protocols : 1.0, 2.0 + Response to requests: TODO: complete list + CircleClockSetRequest + CirclePlusRealTimeClockSetRequest + CircleRelaySwitchRequest + """ + def __init__(self) -> None: + """Initialize NodeResponse message object.""" + super().__init__(b"0000", decode_ack=True) -class NodeAckLargeResponse(NodeResponse): - """Acknowledge message with source MAC + def __repr__(self) -> str: + """Convert request into writable str.""" + if self.ack_id is None: + return f"{super().__repr__()[:-1]}, ack=UNKNOWN)" + return ( + f"{super().__repr__()[:-1]}, ack={str(NodeResponseType(self.ack_id).name)})" + ) - Response to: Any message - """ + @property + def response_type(self) -> NodeResponseType: + """Return acknowledge response type.""" + if self.ack_id is None: + raise MessageError("Acknowledge ID is unknown") + return NodeResponseType(self.ack_id) - ID = b"0000" - def __init__(self): - super().__init__(MESSAGE_LARGE) +class StickNetworkInfoResponse(PlugwiseResponse): + """Report status of zigbee network. - -class CirclePlusQueryResponse(NodeResponse): - """TODO: - Response to : ??? + Supported protocols : 1.0, 2.0 + Response to request : NodeNetworkInfoRequest """ - ID = b"0002" - - def __init__(self): - super().__init__() - self.channel = String(None, length=2) - self.source_mac_id = String(None, length=16) + def __init__(self) -> None: + """Initialize NodeNetworkInfoResponse message object.""" + super().__init__(b"0002") + self._channel = Int(0, length=2) + self._source_mac_id = String(None, length=16) self.extended_pan_id = String(None, length=16) self.unique_network_id = String(None, length=16) - self.new_node_mac_id = String(None, length=16) + self._new_node_mac_id = String(None, length=16) self.pan_id = String(None, length=4) self.idx = Int(0, length=2) - self.params += [ - self.channel, - self.source_mac_id, + self._params += [ + self._channel, + self._source_mac_id, self.extended_pan_id, self.unique_network_id, - self.new_node_mac_id, + self._new_node_mac_id, self.pan_id, self.idx, ] - def __len__(self): - arglen = sum(len(x) for x in self.params) - return 18 + arglen + @property + def channel(self) -> int: + """Return zigbee channel.""" + return self._channel.value - def deserialize(self, response): - super().deserialize(response) - # Clear first two characters of mac ID, as they contain part of the short PAN-ID - self.new_node_mac_id.value = b"00" + self.new_node_mac_id.value[2:] + @property + def new_node_mac_id(self) -> str: + """New node mac_id.""" + # Clear first two characters of mac ID, as they contain + # part of the short PAN-ID + return "00" + self._new_node_mac_id.value[2:] -class CirclePlusQueryEndResponse(NodeResponse): - """TODO: - PWAckReplyV1_0 - +class NodeSpecificResponse(PlugwiseResponse): + """TODO: Report some sort of status from node. - Response to : ??? - """ + PWAckReplyV1_0 + - ID = b"0003" + Supported protocols : 1.0, 2.0 + Response to requests: Unknown: TODO + """ - def __init__(self): - super().__init__() + def __init__(self) -> None: + """Initialize NodeSpecificResponse message object.""" + super().__init__(b"0003") self.status = Int(0, 4) - self.params += [self.status] + self._params += [self.status] - def __len__(self): - arglen = sum(len(x) for x in self.params) - return 18 + arglen +class CirclePlusConnectResponse(PlugwiseResponse): + """CirclePlus connected to the network. 
-class CirclePlusConnectResponse(NodeResponse): - """CirclePlus connected to the network - - Response to : CirclePlusConnectRequest + Supported protocols : 1.0, 2.0 + Response to request : CirclePlusConnectRequest """ - ID = b"0005" - - def __init__(self): - super().__init__() + def __init__(self) -> None: + """Initialize CirclePlusConnectResponse message object.""" + super().__init__(b"0005") self.existing = Int(0, 2) self.allowed = Int(0, 2) - self.params += [self.existing, self.allowed] - - def __len__(self): - arglen = sum(len(x) for x in self.params) - return 18 + arglen + self._params += [self.existing, self.allowed] -class NodeJoinAvailableResponse(NodeResponse): - """Message from an unjoined node to notify it is available to join a plugwise network +class NodeJoinAvailableResponse(PlugwiseResponse): + """Request from Node to join a plugwise network. - Response to : + Supported protocols : 1.0, 2.0 + Response to request : No request as every unjoined node is requesting + to be added automatically """ - ID = b"0006" + def __init__(self) -> None: + """Initialize NodeJoinAvailableResponse message object.""" + super().__init__(NODE_JOIN_ID) -class StickInitResponse(NodeResponse): - """Returns the configuration and status of the USB-Stick +class NodePingResponse(PlugwiseResponse): + """Ping and RSSI (Received Signal Strength Indicator) response from node. - Optional: - - circle_plus_mac - - network_id - - - - + - rssi_in : Incoming last hop RSSI target + - rssi_out : Last hop RSSI source + - time difference in ms - Response to: StickInitRequest + Supported protocols : 1.0, 2.0 + Response to request : NodePingRequest """ - ID = b"0011" - - def __init__(self): - super().__init__() - self.unknown1 = Int(0, length=2) - self.network_is_online = Int(0, length=2) - self.circle_plus_mac = String(None, length=16) - self.network_id = Int(0, 4, False) - self.unknown2 = Int(0, length=2) - self.params += [ - self.unknown1, - self.network_is_online, - self.circle_plus_mac, - self.network_id, - self.unknown2, + def __init__(self) -> None: + """Initialize NodePingResponse message object.""" + super().__init__(b"000E") + self._rssi_in = Int(0, length=2) + self._rssi_out = Int(0, length=2) + self._rtt = Int(0, 4, False) + self._params += [ + self._rssi_in, + self._rssi_out, + self._rtt, ] + @property + def rssi_in(self) -> int: + """Return inbound RSSI level.""" + return self._rssi_in.value + + @property + def rssi_out(self) -> int: + """Return outbound RSSI level.""" + return self._rssi_out.value + + @property + def rtt(self) -> int: + """Return round trip time.""" + return self._rtt.value -class NodePingResponse(NodeResponse): - """Ping response from node - - incomingLastHopRssiTarget (received signal strength indicator) - - lastHopRssiSource - - timediffInMs +class NodeImageValidationResponse(PlugwiseResponse): + """TODO: Some kind of response to validate a firmware image for a node. 
- Response to : NodePingRequest + Supported protocols : 1.0, 2.0 + Response to request : NodeImageValidationRequest """ - ID = b"000E" + def __init__(self, timestamp: datetime | None = None) -> None: + """Initialize NodeImageValidationResponse message object.""" + super().__init__(b"0010") + self.image_timestamp = UnixTimestamp(timestamp) + self._params += [self.image_timestamp] - def __init__(self): - super().__init__() - self.rssi_in = Int(0, length=2) - self.rssi_out = Int(0, length=2) - self.ping_ms = Int(0, 4, False) - self.params += [ - self.rssi_in, - self.rssi_out, - self.ping_ms, + +class StickInitResponse(PlugwiseResponse): + """Returns the configuration and status of the USB-Stick. + + Optional: + - circle_plus_mac + - network_id + - TODO: Two unknown parameters + + Supported protocols : 1.0, 2.0 + Response to request : StickInitRequest + """ + + def __init__(self) -> None: + """Initialize StickInitResponse message object.""" + super().__init__(b"0011") + self._unknown1 = Int(0, length=2) + self._network_online = Int(0, length=2) + self._mac_nc = String(None, length=16) + self._network_id = Int(0, 4, False) + self._unknown2 = Int(0, length=2) + self._params += [ + self._unknown1, + self._network_online, + self._mac_nc, + self._network_id, + self._unknown2, ] + @property + def mac_network_controller(self) -> str: + """Return the mac of the network controller (Circle+).""" + # Replace first 2 characters by 00 for mac of circle+ node + return "00" + self._mac_nc.value[2:] + + @property + def network_id(self) -> int: + """Return network ID.""" + return self._network_id.value -class CirclePowerUsageResponse(NodeResponse): - """Returns power usage as impulse counters for several different timeframes + @property + def network_online(self) -> bool: + """Return state of network.""" + return self._network_online.value == 1 - Response to : CirclePowerUsageRequest + def __repr__(self) -> str: + """Convert request into writable str.""" + return f"{super().__repr__()[:-1]}, network_controller={self.mac_network_controller}, network_online={self.network_online})" + + +class CirclePowerUsageResponse(PlugwiseResponse): + """Returns power usage as impulse counters for several different time frames. 
+
+    Supported protocols : 1.0, 2.0, 2.1, 2.3
+    Response to request : CirclePowerUsageRequest
     """
 
-    ID = b"0013"
+    def __init__(self, protocol_version: str = "2.3") -> None:
+        """Initialize CirclePowerUsageResponse message object."""
+        super().__init__(b"0013")
+        self._pulse_1s = Int(0, 4)
+        self._pulse_8s = Int(0, 4)
+        self._nanosecond_offset = Int(0, 4)
+        self._params += [self._pulse_1s, self._pulse_8s]
+        if protocol_version == "2.3":
+            self._pulse_counter_consumed = Int(0, 8)
+            self._pulse_counter_produced = Int(0, 8)
+            self._params += [
+                self._pulse_counter_consumed,
+                self._pulse_counter_produced,
+            ]
+        self._params += [self._nanosecond_offset]
+
+    @property
+    def pulse_1s(self) -> int:
+        """Return pulses last second."""
+        return self._pulse_1s.value
+
+    @property
+    def pulse_8s(self) -> int:
+        """Return pulses last 8 seconds."""
+        return self._pulse_8s.value
+
+    @property
+    def offset(self) -> int:
+        """Return offset in nanoseconds."""
+        return self._nanosecond_offset.value
+
+    @property
+    def consumed_counter(self) -> int:
+        """Return consumed pulses."""
+        return self._pulse_counter_consumed.value
+
+    @property
+    def produced_counter(self) -> int:
+        """Return produced pulses."""
+        return self._pulse_counter_produced.value
+
+
+class CircleLogDataResponse(PlugwiseResponse):
+    """TODO: Returns some kind of log data from a node.
+
+    Only supported at protocol version 1.0 !
+
+
+
+
+
+    Supported protocols : 1.0
+    Response to: CircleLogDataRequest
+    """
 
-    def __init__(self):
-        super().__init__()
-        self.pulse_1s = Int(0, 4)
-        self.pulse_8s = Int(0, 4)
-        self.pulse_hour_consumed = Int(0, 8)
-        self.pulse_hour_produced = Int(0, 8)
-        self.nanosecond_offset = Int(0, 4)
-        self.params += [
-            self.pulse_1s,
-            self.pulse_8s,
-            self.pulse_hour_consumed,
-            self.pulse_hour_produced,
-            self.nanosecond_offset,
+    def __init__(self) -> None:
+        """Initialize CircleLogDataResponse message object."""
+        super().__init__(b"0015")
+        self.stored_abs = DateTime()
+        self.powermeterinfo = Int(0, 8, False)
+        self.flashaddress = LogAddr(0, length=8)
+        self._params += [
+            self.stored_abs,
+            self.powermeterinfo,
+            self.flashaddress,
         ]
 
 
-class CirclePlusScanResponse(NodeResponse):
-    """Returns the MAC of a registered node at the specified memory address
+class CirclePlusScanResponse(PlugwiseResponse):
+    """Returns the MAC of a registered node at the specified memory address of a Circle+.
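# Hedged example (not part of the upstream change): one plausible way to turn
# the raw CirclePowerUsageResponse fields above into an average
# pulses-per-second figure. It assumes the nanosecond offset expresses how far
# the 8-second measurement window deviated from exactly 8 s; both that
# interpretation and the helper name are assumptions made for illustration only.
def pulses_per_second(pulse_8s: int, nanosecond_offset: int) -> float:
    """Return the average pulses per second over the ~8 second window."""
    window_seconds = 8.0 + nanosecond_offset / 1_000_000_000
    if window_seconds <= 0:
        raise ValueError("measurement window must be positive")
    return pulse_8s / window_seconds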
- Response to: CirclePlusScanRequest + Supported protocols : 1.0, 2.0 + Response to request : CirclePlusScanRequest """ - ID = b"0019" + def __init__(self) -> None: + """Initialize CirclePlusScanResponse message object.""" + super().__init__(b"0019") + self._registered_mac = String(None, length=16) + self._network_address = Int(0, 2, False) + self._params += [self._registered_mac, self._network_address] - def __init__(self): - super().__init__() - self.node_mac = String(None, length=16) - self.node_address = Int(0, 2, False) - self.params += [self.node_mac, self.node_address] + @property + def registered_mac(self) -> str: + """Return the mac of the node.""" + return self._registered_mac.value + @property + def network_address(self) -> int: + """Return the network address.""" + return self._network_address.value -class NodeRemoveResponse(NodeResponse): - """Returns conformation (or not) if node is removed from the Plugwise network - by having it removed from the memory of the Circle+ + def __repr__(self) -> str: + """Convert response into writable str.""" + return f"{super().__repr__()[:-1]}, network_address={self.network_address}, registered_mac={self.registered_mac})" - Response to: NodeRemoveRequest - """ - ID = b"001D" +class NodeRemoveResponse(PlugwiseResponse): + """Confirmation (or not) if node is removed from the Plugwise network. - def __init__(self): - super().__init__() + Also confirmation it has been removed from the memory of the Circle+ + + Supported protocols : 1.0, 2.0 + Response to request : NodeRemoveRequest + """ + + def __init__(self) -> None: + """Initialize NodeRemoveResponse message object.""" + super().__init__(b"001D") self.node_mac_id = String(None, length=16) self.status = Int(0, 2) - self.params += [self.node_mac_id, self.status] + self._params += [self.node_mac_id, self.status] -class NodeInfoResponse(NodeResponse): - """Returns the status information of Node +class NodeInfoResponse(PlugwiseResponse): + """Returns the status information of Node. 
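# Illustrative sketch (not part of the upstream change): interpreting the
# registered_mac / network_address pair exposed by CirclePlusScanResponse above
# while walking the Circle+ registry. The all-F placeholder for an empty slot
# matches the check used later in StickNetworkRegister; the helper itself is
# only an example.
EMPTY_REGISTRATION = "FFFFFFFFFFFFFFFF"


def registration_slot(
    network_address: int, registered_mac: str
) -> tuple[int, str | None]:
    """Return (address, mac), with None for an unused registry slot."""
    if registered_mac in ("", EMPTY_REGISTRATION):
        return (network_address, None)
    return (network_address, registered_mac.upper())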
- Response to: NodeInfoRequest + Supported protocols : 1.0, 2.0, 2.3 + Response to request : NodeInfoRequest """ - ID = b"0024" - - def __init__(self): - super().__init__() - self.datetime = DateTime() - self.last_logaddr = LogAddr(0, length=8) - self.relay_state = Int(0, length=2) - # TODO: 20220126 snake-style - # pylint: disable=invalid-name - self.hz = Int(0, length=2) - self.hw_ver = String(None, length=12) - self.fw_ver = UnixTimestamp(0) - self.node_type = Int(0, length=2) - self.params += [ - self.datetime, - self.last_logaddr, - self.relay_state, - self.hz, - self.hw_ver, - self.fw_ver, - self.node_type, + def __init__(self, protocol_version: str = "2.0") -> None: + """Initialize NodeInfoResponse message object.""" + super().__init__(b"0024") + + self._logaddress_pointer = LogAddr(0, length=8) + if protocol_version == "1.0": + # FIXME: Define "absoluteHour" variable + self.datetime = DateTime() + self._relay_state = Int(0, length=2) + self._params += [ + self.datetime, + self._logaddress_pointer, + self._relay_state, + ] + elif protocol_version == "2.0": + self.datetime = DateTime() + self._relay_state = Int(0, length=2) + self._params += [ + self.datetime, + self._logaddress_pointer, + self._relay_state, + ] + elif protocol_version == "2.3": + # FIXME: Define "State_mask" variable + self.state_mask = Int(0, length=2) + self._params += [ + self.datetime, + self._logaddress_pointer, + self.state_mask, + ] + self._frequency = Int(0, length=2) + self._hw_ver = String(None, length=12) + self._fw_ver = UnixTimestamp(None) + self._node_type = Int(0, length=2) + self._params += [ + self._frequency, + self._hw_ver, + self._fw_ver, + self._node_type, ] + @property + def hardware(self) -> str: + """Return hardware id.""" + return str(self._hw_ver.value) -class CircleCalibrationResponse(NodeResponse): - """returns the calibration settings of node + @property + def firmware(self) -> datetime: + """Return timestamp of firmware.""" + return self._fw_ver.value - Response to: CircleCalibrationRequest - """ + @property + def node_type(self) -> NodeType: + """Return the type of node.""" + return NodeType(self._node_type.value) - ID = b"0027" + @property + def current_logaddress_pointer(self) -> int: + """Return the current energy log address.""" + return self._logaddress_pointer.value - def __init__(self): - super().__init__() - self.gain_a = Float(0, 8) - self.gain_b = Float(0, 8) - self.off_tot = Float(0, 8) - self.off_noise = Float(0, 8) - self.params += [self.gain_a, self.gain_b, self.off_tot, self.off_noise] + @property + def relay_state(self) -> bool: + """Return state of relay.""" + return self._relay_state.value == 1 + @property + def frequency(self) -> int: + """Return frequency config of node.""" + return self._frequency.value -class CirclePlusRealTimeClockResponse(NodeResponse): - """returns the real time clock of CirclePlus node + def __repr__(self) -> str: + """Convert request into writable str.""" + return f"{super().__repr__()[:-1]}, log_address_pointer={self._logaddress_pointer.value})" - Response to: CirclePlusRealTimeClockGetRequest - """ - ID = b"003A" +class EnergyCalibrationResponse(PlugwiseResponse): + """Returns the calibration settings of node. 
- def __init__(self): - super().__init__() + Supported protocols : 1.0, 2.0 + Response to request : EnergyCalibrationRequest + """ + def __init__(self) -> None: + """Initialize EnergyCalibrationResponse message object.""" + super().__init__(b"0027") + self._gain_a = Float(0, 8) + self._gain_b = Float(0, 8) + self._off_tot = Float(0, 8) + self._off_noise = Float(0, 8) + self._params += [self._gain_a, self._gain_b, self._off_tot, self._off_noise] + + @property + def gain_a(self) -> float: + """Return the gain A.""" + return self._gain_a.value + + @property + def gain_b(self) -> float: + """Return the gain B.""" + return self._gain_b.value + + @property + def off_tot(self) -> float: + """Return the offset.""" + return self._off_tot.value + + @property + def off_noise(self) -> float: + """Return the offset.""" + return self._off_noise.value + + +class CirclePlusRealTimeClockResponse(PlugwiseResponse): + """returns the real time clock of CirclePlus node. + + Supported protocols : 1.0, 2.0 + Response to request : CirclePlusRealTimeClockGetRequest + """ + + def __init__(self) -> None: + """Initialize CirclePlusRealTimeClockResponse message object.""" + super().__init__(b"003A") self.time = RealClockTime() self.day_of_week = Int(0, 2, False) self.date = RealClockDate() - self.params += [self.time, self.day_of_week, self.date] + self._params += [self.time, self.day_of_week, self.date] -class CircleClockResponse(NodeResponse): - """Returns the current internal clock of Node +# TODO : Insert +# +# ID = b"003D" = Schedule response - Response to: CircleClockGetRequest - """ - ID = b"003F" +class CircleClockResponse(PlugwiseResponse): + """Returns the current internal clock of Node. - def __init__(self): - super().__init__() + Supported protocols : 1.0, 2.0 + Response to request : CircleClockGetRequest + """ + + def __init__(self) -> None: + """Initialize CircleClockResponse message object.""" + super().__init__(b"003F") self.time = Time() self.day_of_week = Int(0, 2, False) self.unknown = Int(0, 2) self.unknown2 = Int(0, 4) - self.params += [self.time, self.day_of_week, self.unknown, self.unknown2] + self._params += [ + self.time, + self.day_of_week, + self.unknown, + self.unknown2, + ] -class CircleEnergyCountersResponse(NodeResponse): - """Returns historical energy usage of requested memory address +class CircleEnergyLogsResponse(PlugwiseResponse): + """Returns historical energy usage of requested memory address. 
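# Hedged example (not part of the upstream change): combining the calibration
# values exposed above (gain_a, gain_b, off_tot, off_noise) with a
# pulses-per-second reading to estimate power. The quadratic correction and the
# pulses-per-kWs constant follow the formula commonly used by Plugwise
# integrations, but both should be treated as assumptions in this sketch.
PULSES_PER_KW_SECOND = 468.9385193


def pulses_to_watts(
    pulses_per_second: float,
    gain_a: float,
    gain_b: float,
    off_tot: float,
    off_noise: float,
) -> float:
    """Convert a pulses-per-second reading to watts using node calibration."""
    value = pulses_per_second + off_noise
    corrected = (value**2) * gain_b + value * gain_a + off_tot
    return corrected / PULSES_PER_KW_SECOND * 1000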
+ Each response contains 4 energy counters at specified 1 hour timestamp - Response to: CircleEnergyCountersRequest + Response to: CircleEnergyLogsRequest """ - ID = b"0049" - - def __init__(self): - super().__init__() + def __init__(self) -> None: + """Initialize CircleEnergyLogsResponse message object.""" + super().__init__(b"0049") self.logdate1 = DateTime() self.pulses1 = Int(0, 8) self.logdate2 = DateTime() @@ -409,8 +761,8 @@ def __init__(self): self.pulses3 = Int(0, 8) self.logdate4 = DateTime() self.pulses4 = Int(0, 8) - self.logaddr = LogAddr(0, length=8) - self.params += [ + self._logaddr = LogAddr(0, length=8) + self._params += [ self.logdate1, self.pulses1, self.logdate2, @@ -419,161 +771,257 @@ def __init__(self): self.pulses3, self.logdate4, self.pulses4, - self.logaddr, + self._logaddr, ] + @property + def log_address(self) -> int: + """Return the gain A.""" + return self._logaddr.value + + @property + def log_data(self) -> dict[int, tuple[datetime | None, int | None]]: + """Return log data.""" + log_data: dict[int, tuple[datetime | None, int | None]] = {} + if self.logdate1.value_set: + log_data[1] = (self.logdate1.value, self.pulses1.value) + else: + log_data[1] = (None, None) + if self.logdate2.value_set: + log_data[2] = (self.logdate2.value, self.pulses2.value) + else: + log_data[2] = (None, None) + if self.logdate3.value_set: + log_data[3] = (self.logdate3.value, self.pulses3.value) + else: + log_data[3] = (None, None) + if self.logdate4.value_set: + log_data[4] = (self.logdate4.value, self.pulses4.value) + else: + log_data[4] = (None, None) + return log_data -class NodeAwakeResponse(NodeResponse): - """A sleeping end device (SED: Scan, Sense, Switch) sends - this message to announce that is awake. Awake types: + def __repr__(self) -> str: + """Convert request into writable str.""" + return f"{super().__repr__()[:-1]}, log_address={self._logaddr.value})" + + +class NodeAwakeResponse(PlugwiseResponse): + """Announce that a sleeping end device is awake. + + A sleeping end device (SED) like Scan, Sense, Switch) sends + this message to announce that is awake. + Possible awake types: - 0 : The SED joins the network for maintenance - 1 : The SED joins a network for the first time - - 2 : The SED joins a network it has already joined, e.g. after reinserting a battery - - 3 : When a SED switches a device group or when reporting values such as temperature/humidity + - 2 : The SED joins a network it has already joined, e.g. after + reinserting a battery + - 3 : When a SED switches a device group or when reporting values + such as temperature/humidity - 4 : TODO: Unknown - 5 : A human pressed the button on a SED to wake it up Response to: """ - ID = b"004F" + def __init__(self) -> None: + """Initialize NodeAwakeResponse message object.""" + super().__init__(NODE_AWAKE_RESPONSE_ID) + self._awake_type = Int(0, 2, False) + self._params += [self._awake_type] + self.priority = Priority.HIGH - def __init__(self): - super().__init__() - self.awake_type = Int(0, 2, False) - self.params += [self.awake_type] + @property + def awake_type(self) -> NodeAwakeResponseType: + """Return the node awake type.""" + return NodeAwakeResponseType(self._awake_type.value) + def __repr__(self) -> str: + """Convert request into writable str.""" + return f"{super().__repr__()[:-1]}, awake_type={self.awake_type.name})" -class NodeSwitchGroupResponse(NodeResponse): - """A sleeping end device (SED: Scan, Sense, Switch) sends + +class NodeSwitchGroupResponse(PlugwiseResponse): + """Announce groups on/off. 
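# Illustrative sketch (not part of the upstream change): folding the log_data
# mapping of several CircleEnergyLogsResponse messages (one per log address,
# four hourly slots each, as defined above) into a single timestamp-indexed
# pulse series. Empty slots are the (None, None) tuples produced by the
# property; the helper name is an assumption.
from datetime import datetime


def merge_energy_logs(
    per_address_log_data: list[dict[int, tuple[datetime | None, int | None]]],
) -> dict[datetime, int]:
    """Merge per-address log slots into one {timestamp: pulses} series."""
    series: dict[datetime, int] = {}
    for log_data in per_address_log_data:
        for timestamp, pulses in log_data.values():
            if timestamp is None or pulses is None:
                continue  # skip empty log slots
            series[timestamp] = pulses
    return dict(sorted(series.items()))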
+ + A sleeping end device (SED: Scan, Sense, Switch) sends this message to switch groups on/off when the configured switching conditions have been met. Response to: """ - ID = b"0056" - - def __init__(self): - super().__init__() + def __init__(self) -> None: + """Initialize NodeSwitchGroupResponse message object.""" + super().__init__(NODE_SWITCH_GROUP_ID) self.group = Int(0, 2, False) - self.power_state = Int(0, length=2) - self.params += [ + self._power_state = Int(0, length=2) + self._params += [ self.group, - self.power_state, + self._power_state, ] + @property + def switch_state(self) -> bool: + """Return state of switch (True = On, False = Off).""" + return (self._power_state.value != 0) -class NodeFeaturesResponse(NodeResponse): - """Returns supported features of node - TODO: FeatureBitmask + def __repr__(self) -> str: + """Convert request into writable str.""" + return f"{super().__repr__()[:-1]}, power_state={self._power_state.value}, group={self.group.value})" + +class NodeFeaturesResponse(PlugwiseResponse): + """Returns supported features of node. + + TODO: Feature Bit mask Response to: NodeFeaturesRequest """ - ID = b"0060" - - def __init__(self): - super().__init__() + def __init__(self) -> None: + """Initialize NodeFeaturesResponse message object.""" + super().__init__(b"0060") self.features = String(None, length=16) - self.params += [self.features] + self._params += [self.features] -class NodeJoinAckResponse(NodeResponse): +class NodeRejoinResponse(PlugwiseResponse): """Notification message when node (re)joined existing network again. - Sent when a SED (re)joins the network e.g. when you reinsert the battery of a Scan + + Sent when a SED (re)joins the network e.g. when you reinsert + the battery of a Scan + + sequence number is always FFFD Response to: or NodeAddRequest """ - ID = b"0061" + def __init__(self) -> None: + """Initialize NodeRejoinResponse message object.""" + super().__init__(NODE_REJOIN_ID) - def __init__(self): - super().__init__() - # sequence number is always FFFD +class NodeAckResponse(PlugwiseResponse): + """Acknowledge message in regular format. -class NodeAckResponse(NodeResponse): - """Acknowledge message in regular format Sent by nodes supporting plugwise 2.4 protocol version - Response to: + Response to: ? """ - ID = b"0100" + def __init__(self) -> None: + """Initialize NodeAckResponse message object.""" + super().__init__(b"0100") + self._node_ack_type = Bytes(None, length=4) + self._params += [self._node_ack_type] + self.priority = Priority.HIGH - def __init__(self): - super().__init__() - self.ack_id = Int(0, 2, False) + @property + def node_ack_type(self) -> NodeAckResponseType: + """Return acknowledge response type.""" + return NodeAckResponseType(self._node_ack_type.value) + + def __repr__(self) -> str: + """Convert request into writable str.""" + return f"{super().__repr__()[:-1]}, Ack={self.node_ack_type.name})" -class SenseReportResponse(NodeResponse): +class SenseReportResponse(PlugwiseResponse): """Returns the current temperature and humidity of a Sense node. 
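# Hedged example (not part of the upstream change): converting the raw 16-bit
# humidity and temperature words carried by a Sense report into engineering
# units. The SHT21-style constants below are the ones Plugwise Sense
# integrations typically use, but they are assumptions in this sketch, as is
# the helper name.
def decode_sense_report(raw_humidity: int, raw_temperature: int) -> tuple[float, float]:
    """Return (relative humidity in %, temperature in degrees Celsius)."""
    humidity = 125.0 * raw_humidity / 65536 - 6.0
    temperature = 175.72 * raw_temperature / 65536 - 46.85
    return (round(humidity, 1), round(temperature, 1))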
- The interval this report is sent is configured by the 'SenseReportIntervalRequest' request + + The interval this report is sent is configured by + the 'SenseReportIntervalRequest' request Response to: """ - ID = b"0105" - - def __init__(self): - super().__init__() + def __init__(self) -> None: + """Initialize SenseReportResponse message object.""" + super().__init__(SENSE_REPORT_ID) self.humidity = Int(0, length=4) self.temperature = Int(0, length=4) - self.params += [self.humidity, self.temperature] + self._params += [self.humidity, self.temperature] -class CircleInitialRelaisStateResponse(NodeResponse): - """Returns the initial relais state. +class CircleRelayInitStateResponse(PlugwiseResponse): + """Returns the configured relay state after power-up of Circle. - Response to: CircleInitialRelaisStateRequest + Supported protocols : 2.6 + Response to request : CircleRelayInitStateRequest """ - ID = b"0139" + def __init__(self) -> None: + """Initialize CircleRelayInitStateResponse message object.""" + super().__init__(b"0139") + self.is_get = Int(0, length=2) + self.relay = Int(0, length=2) + self._params += [self.is_get, self.relay] - def __init__(self): - super().__init__() - set_or_get = Int(0, length=2) - relais = Int(0, length=2) - self.params += [set_or_get, relais] - - -id_to_message = { - b"0002": CirclePlusQueryResponse(), - b"0003": CirclePlusQueryEndResponse(), - b"0005": CirclePlusConnectResponse(), - b"0006": NodeJoinAvailableResponse(), - b"000E": NodePingResponse(), - b"0011": StickInitResponse(), - b"0013": CirclePowerUsageResponse(), - b"0019": CirclePlusScanResponse(), - b"001D": NodeRemoveResponse(), - b"0024": NodeInfoResponse(), - b"0027": CircleCalibrationResponse(), - b"003A": CirclePlusRealTimeClockResponse(), - b"003F": CircleClockResponse(), - b"0049": CircleEnergyCountersResponse(), - b"0060": NodeFeaturesResponse(), - b"0100": NodeAckResponse(), - b"0105": SenseReportResponse(), -} - - -def get_message_response(message_id, length, seq_id): + +def get_message_object( # noqa: C901 + identifier: bytes, length: int, seq_id: bytes +) -> PlugwiseResponse | None: """Return message class based on sequence ID, Length of message or message ID.""" + # First check for known sequence ID's - if seq_id == b"FFFD": - return NodeJoinAckResponse() - if seq_id == b"FFFE": + if seq_id == REJOIN_RESPONSE_SEQ_ID: + return NodeRejoinResponse() + if seq_id == AWAKE_RESPONSE_SEQ_ID: return NodeAwakeResponse() - if seq_id == b"FFFF": + if seq_id == SWITCH_GROUP_RESPONSE_SEQ_ID: return NodeSwitchGroupResponse() + if seq_id == JOIN_AVAILABLE_SEQ_ID: + return NodeJoinAvailableResponse() # No fixed sequence ID, continue at message ID - if message_id == b"0000": + if identifier == b"0000": if length == 20: - return NodeAckSmallResponse() + return StickResponse() if length == 36: - return NodeAckLargeResponse() + return NodeResponse() return None - return id_to_message.get(message_id, None) + + # Regular response ID's + if identifier == b"0002": + return StickNetworkInfoResponse() + if identifier == b"0003": + return NodeSpecificResponse() + if identifier == b"0005": + return CirclePlusConnectResponse() + if identifier == NODE_JOIN_ID: + return NodeJoinAvailableResponse() + if identifier == b"000E": + return NodePingResponse() + if identifier == b"0010": + return NodeImageValidationResponse() + if identifier == b"0011": + return StickInitResponse() + if identifier == b"0013": + return CirclePowerUsageResponse() + if identifier == b"0015": + return CircleLogDataResponse() + if identifier == 
b"0019": + return CirclePlusScanResponse() + if identifier == b"001D": + return NodeRemoveResponse() + if identifier == b"0024": + return NodeInfoResponse() + if identifier == b"0027": + return EnergyCalibrationResponse() + if identifier == b"003A": + return CirclePlusRealTimeClockResponse() + if identifier == b"003F": + return CircleClockResponse() + if identifier == b"0049": + return CircleEnergyLogsResponse() + if identifier == NODE_SWITCH_GROUP_ID: + return NodeSwitchGroupResponse() + if identifier == b"0060": + return NodeFeaturesResponse() + if identifier == b"0100": + return NodeAckResponse() + if identifier == SENSE_REPORT_ID: + return SenseReportResponse() + if identifier == b"0139": + return CircleRelayInitStateResponse() + raise MessageError(f"Unknown message for identifier {identifier!r}") diff --git a/plugwise_usb/network/__init__.py b/plugwise_usb/network/__init__.py new file mode 100644 index 000000000..cab52a073 --- /dev/null +++ b/plugwise_usb/network/__init__.py @@ -0,0 +1,546 @@ +"""Plugwise network.""" + +# region - Imports + +from __future__ import annotations + +from asyncio import Task, create_task, gather, sleep +from collections.abc import Callable, Coroutine +from datetime import datetime, timedelta +import logging +from typing import Any + +from ..api import NodeEvent, NodeType, PlugwiseNode, StickEvent +from ..connection import StickController +from ..constants import UTF8 +from ..exceptions import CacheError, MessageError, NodeError, StickError, StickTimeout +from ..helpers.util import validate_mac +from ..messages.requests import CirclePlusAllowJoiningRequest, NodePingRequest +from ..messages.responses import ( + NODE_AWAKE_RESPONSE_ID, + NODE_JOIN_ID, + NODE_REJOIN_ID, + NodeAwakeResponse, + NodeInfoResponse, + NodeJoinAvailableResponse, + NodePingResponse, + NodeRejoinResponse, + NodeResponseType, + PlugwiseResponse, +) +from ..nodes import get_plugwise_node +from .registry import StickNetworkRegister + +_LOGGER = logging.getLogger(__name__) +# endregion + + +class StickNetwork: + """USB-Stick zigbee network class.""" + + accept_join_request = False + _event_subscriptions: dict[StickEvent, int] = {} + + def __init__( + self, + controller: StickController, + ) -> None: + """Initialize the USB-Stick zigbee network class.""" + self._controller = controller + self._register = StickNetworkRegister( + bytes(controller.mac_coordinator, encoding=UTF8), + controller.send, + ) + self._is_running: bool = False + + self._cache_folder: str = "" + self._cache_enabled: bool = False + self._cache_folder_create = False + + self._discover: bool = False + self._nodes: dict[str, PlugwiseNode] = {} + self._awake_discovery: dict[str, datetime] = {} + + self._node_event_subscribers: dict[ + Callable[[], None], + tuple[ + Callable[[NodeEvent, str], Coroutine[Any, Any, None]], + tuple[NodeEvent, ...], + ], + ] = {} + + self._unsubscribe_stick_event: Callable[[], None] | None = None + self._unsubscribe_node_awake: Callable[[], None] | None = None + self._unsubscribe_node_join: Callable[[], None] | None = None + self._unsubscribe_node_rejoin: Callable[[], None] | None = None + + self._discover_sed_tasks: dict[str, Task[bool]] = {} + + # region - Properties + + @property + def cache_enabled(self) -> bool: + """Return usage of cache of network register.""" + return self._cache_enabled + + @cache_enabled.setter + def cache_enabled(self, enable: bool = True) -> None: + """Enable or disable usage of cache of network register.""" + self._register.cache_enabled = enable + if 
self._cache_enabled != enable: + for node in self._nodes.values(): + node.cache_enabled = enable + self._cache_enabled = enable + + @property + def cache_folder(self) -> str: + """Path to cache data of network register.""" + return self._cache_folder + + @cache_folder.setter + def cache_folder(self, cache_folder: str) -> None: + """Set path to cache data of network register.""" + self._cache_folder = cache_folder + self._register.cache_folder = cache_folder + for node in self._nodes.values(): + node.cache_folder = cache_folder + + @property + def cache_folder_create(self) -> bool: + """Return if cache folder must be create when it does not exists.""" + return self._cache_folder_create + + @cache_folder_create.setter + def cache_folder_create(self, enable: bool = True) -> None: + """Enable or disable creation of cache folder.""" + self._cache_folder_create = enable + + async def initialize_cache(self) -> None: + """Initialize the cache folder.""" + if not self._cache_enabled: + raise CacheError("Unable to initialize cache, enable cache first.") + await self._register.initialize_cache(self._cache_folder_create) + + @property + def controller_active(self) -> bool: + """Return True if network controller (Circle+) is discovered and active.""" + if self._controller.mac_coordinator in self._nodes: + return self._nodes[self._controller.mac_coordinator].available + return False + + @property + def is_running(self) -> bool: + """Return state of network discovery.""" + return self._is_running + + @property + def nodes( + self, + ) -> dict[str, PlugwiseNode]: + """Dictionary with all discovered network nodes with the mac address as the key.""" + return self._nodes + + @property + def registry(self) -> dict[int, tuple[str, NodeType | None]]: + """Return dictionary with all registered (joined) nodes.""" + return self._register.registry + + # endregion + + async def register_node(self, mac: str) -> bool: + """Register node to Plugwise network.""" + if not validate_mac(mac): + raise NodeError(f"Invalid mac '{mac}' to register") + address = await self._register.register_node(mac) + return await self._discover_node(address, mac, None) + + async def clear_cache(self) -> None: + """Clear register cache.""" + await self._register.clear_register_cache() + + async def unregister_node(self, mac: str) -> None: + """Unregister node from current Plugwise network.""" + await self._register.unregister_node(mac) + await self._nodes[mac].unload() + self._nodes.pop(mac) + + # region - Handle stick connect/disconnect events + def _subscribe_to_protocol_events(self) -> None: + """Subscribe to events from protocol.""" + self._unsubscribe_stick_event = self._controller.subscribe_to_stick_events( + self._handle_stick_event, + (StickEvent.CONNECTED, StickEvent.DISCONNECTED), + ) + + async def _subscribe_to_node_events(self) -> None: + """Subscribe to events from protocol.""" + self._unsubscribe_node_awake = await self._controller.subscribe_to_messages( + self.node_awake_message, + None, + (NODE_AWAKE_RESPONSE_ID,), + None, + ) + self._unsubscribe_node_join = await self._controller.subscribe_to_messages( + self.node_join_available_message, None, (NODE_JOIN_ID,), None + ) + self._unsubscribe_node_rejoin = await self._controller.subscribe_to_messages( + self.node_rejoin_message, None, (NODE_REJOIN_ID,), None + ) + + async def _handle_stick_event(self, event: StickEvent) -> None: + """Handle stick events.""" + if event == StickEvent.CONNECTED: + await gather( + *[ + node.reconnect() + for node in self._nodes.values() + if not 
node.available + ] + ) + self._is_running = True + await self.discover_nodes() + elif event == StickEvent.DISCONNECTED: + await gather(*[node.disconnect() for node in self._nodes.values()]) + self._is_running = False + + async def node_awake_message(self, response: PlugwiseResponse) -> bool: + """Handle NodeAwakeResponse message.""" + if not isinstance(response, NodeAwakeResponse): + raise MessageError( + f"Invalid response message type ({response.__class__.__name__}) received, expected NodeAwakeResponse" + ) + mac = response.mac_decoded + if self._awake_discovery.get(mac) is None: + self._awake_discovery[mac] = response.timestamp - timedelta(seconds=15) + if mac in self._nodes: + if self._awake_discovery[mac] < ( + response.timestamp - timedelta(seconds=10) + ): + await self._notify_node_event_subscribers(NodeEvent.AWAKE, mac) + self._awake_discovery[mac] = response.timestamp + return True + if (address := self._register.network_address(mac)) is None: + if self._register.scan_completed: + return True + _LOGGER.debug( + "Skip node awake message for %s because network registry address is unknown", + mac, + ) + return True + + if self._nodes.get(mac) is None: + if ( + self._discover_sed_tasks.get(mac) is None + or self._discover_sed_tasks[mac].done() + ): + self._discover_sed_tasks[mac] = create_task( + self._discover_battery_powered_node(address, mac) + ) + else: + _LOGGER.debug("duplicate maintenance awake discovery for %s", mac) + return True + + async def node_join_available_message(self, response: PlugwiseResponse) -> bool: + """Handle NodeJoinAvailableResponse messages.""" + if not isinstance(response, NodeJoinAvailableResponse): + raise MessageError( + f"Invalid response message type ({response.__class__.__name__}) received, expected NodeJoinAvailableResponse" + ) + mac = response.mac_decoded + await self._notify_node_event_subscribers(NodeEvent.JOIN, mac) + return True + + async def node_rejoin_message(self, response: PlugwiseResponse) -> bool: + """Handle NodeRejoinResponse messages.""" + if not isinstance(response, NodeRejoinResponse): + raise MessageError( + f"Invalid response message type ({response.__class__.__name__}) received, expected NodeRejoinResponse" + ) + mac = response.mac_decoded + address = self._register.network_address(mac) + if (address := self._register.network_address(mac)) is not None: + if self._nodes.get(mac) is None: + if self._discover_sed_tasks.get(mac) is None: + self._discover_sed_tasks[mac] = create_task( + self._discover_battery_powered_node(address, mac) + ) + elif self._discover_sed_tasks[mac].done(): + self._discover_sed_tasks[mac] = create_task( + self._discover_battery_powered_node(address, mac) + ) + else: + _LOGGER.debug("duplicate awake discovery for %s", mac) + return True + else: + raise NodeError("Unknown network address for node {mac}") + return True + + def _unsubscribe_to_protocol_events(self) -> None: + """Unsubscribe to events from protocol.""" + if self._unsubscribe_node_awake is not None: + self._unsubscribe_node_awake() + self._unsubscribe_node_awake = None + if self._unsubscribe_stick_event is not None: + self._unsubscribe_stick_event() + self._unsubscribe_stick_event = None + + # endregion + + # region - Coordinator + async def discover_network_coordinator(self, load: bool = False) -> bool: + """Discover the Zigbee network coordinator (Circle+/Stealth+).""" + if self._controller.mac_coordinator is None: + raise NodeError("Unknown mac address for network coordinator.") + if load and await 
self._load_node(self._controller.mac_coordinator): + return True + + # Validate the network controller is online + # try to ping first and raise error at stick timeout + ping_request = NodePingRequest( + self._controller.send, + bytes(self._controller.mac_coordinator, UTF8), + retries=1, + ) + try: + ping_response = await ping_request.send() + except StickTimeout as err: + raise StickError( + "The zigbee network coordinator (Circle+/Stealth+) with mac " + + "'%s' did not respond to ping request. Make " + + "sure the Circle+/Stealth+ is within reach of the USB-stick !", + self._controller.mac_coordinator, + ) from err + if ping_response is None: + return False + + if await self._discover_node( + -1, self._controller.mac_coordinator, None, ping_first=False + ): + if load: + return await self._load_node(self._controller.mac_coordinator) + return True + return False + + # endregion + + # region - Nodes + def _create_node_object( + self, + mac: str, + address: int, + node_type: NodeType, + ) -> None: + """Create node object and update network registry.""" + if self._nodes.get(mac) is not None: + _LOGGER.debug( + "Skip creating node object because node object for mac %s already exists", + mac, + ) + return + node = get_plugwise_node( + mac, + address, + self._controller, + self._notify_node_event_subscribers, + node_type, + ) + if node is None: + _LOGGER.warning("Node %s of type %s is unsupported", mac, str(node_type)) + return + self._nodes[mac] = node + _LOGGER.debug("%s node %s added", node.__class__.__name__, mac) + self._register.update_network_registration(address, mac, node_type) + + if self._cache_enabled: + _LOGGER.debug( + "Enable caching for node %s to folder '%s'", + mac, + self._cache_folder, + ) + self._nodes[mac].cache_folder = self._cache_folder + self._nodes[mac].cache_folder_create = self._cache_folder_create + self._nodes[mac].cache_enabled = True + + async def _discover_battery_powered_node( + self, + address: int, + mac: str, + ) -> bool: + """Discover a battery powered node and add it to list of nodes. + + Return True if discovery succeeded. + """ + if not await self._discover_node( + address, mac, node_type=None, ping_first=False + ): + return False + if await self._load_node(mac): + await self._notify_node_event_subscribers(NodeEvent.AWAKE, mac) + return True + return False + + async def _discover_node( + self, + address: int, + mac: str, + node_type: NodeType | None, + ping_first: bool = True, + ) -> bool: + """Discover node and add it to list of nodes. + + Return True if discovery succeeded. 
+ """ + _LOGGER.debug("Start discovery of node %s ", mac) + if self._nodes.get(mac) is not None: + _LOGGER.debug("Skip discovery of already known node %s ", mac) + return True + + if node_type is not None: + self._create_node_object(mac, address, node_type) + await self._notify_node_event_subscribers(NodeEvent.DISCOVERED, mac) + return True + + # Node type is unknown, so we need to discover it first + _LOGGER.debug("Starting the discovery of node %s", mac) + node_info, node_ping = await self._controller.get_node_details(mac, ping_first) + if node_info is None: + return False + self._create_node_object(mac, address, node_info.node_type) + + # Forward received NodeInfoResponse message to node + await self._nodes[mac].message_for_node(node_info) + if node_ping is not None: + await self._nodes[mac].message_for_node(node_ping) + await self._notify_node_event_subscribers(NodeEvent.DISCOVERED, mac) + return True + + async def _discover_registered_nodes(self) -> None: + """Discover nodes.""" + _LOGGER.debug("Start discovery of registered nodes") + counter = 0 + for address, registration in self._register.registry.items(): + mac, node_type = registration + if mac != "": + if self._nodes.get(mac) is None: + await self._discover_node(address, mac, node_type) + counter += 1 + await sleep(0) + _LOGGER.debug("Total %s registered node(s)", str(counter)) + + async def _load_node(self, mac: str) -> bool: + """Load node.""" + if self._nodes.get(mac) is None: + return False + if self._nodes[mac].is_loaded: + return True + if await self._nodes[mac].load(): + await self._notify_node_event_subscribers(NodeEvent.LOADED, mac) + return True + return False + + async def _load_discovered_nodes(self) -> bool: + """Load all nodes currently discovered.""" + _LOGGER.debug("_load_discovered_nodes | START | %s", len(self._nodes)) + for mac, node in self._nodes.items(): + _LOGGER.debug( + "_load_discovered_nodes | mac=%s | loaded=%s", mac, node.is_loaded + ) + + nodes_not_loaded = tuple( + mac for mac, node in self._nodes.items() if not node.is_loaded + ) + _LOGGER.debug("_load_discovered_nodes | nodes_not_loaded=%s", nodes_not_loaded) + load_result = await gather(*[self._load_node(mac) for mac in nodes_not_loaded]) + _LOGGER.debug("_load_discovered_nodes | load_result=%s", load_result) + result_index = 0 + for mac in nodes_not_loaded: + if load_result[result_index]: + await self._notify_node_event_subscribers(NodeEvent.LOADED, mac) + else: + _LOGGER.debug( + "_load_discovered_nodes | Load request for %s failed", mac + ) + result_index += 1 + _LOGGER.debug("_load_discovered_nodes | END") + return all(load_result) + + async def _unload_discovered_nodes(self) -> None: + """Unload all nodes.""" + await gather(*[node.unload() for node in self._nodes.values()]) + + # endregion + + # region - Network instance + async def start(self) -> None: + """Start and activate network.""" + self._register.quick_scan_finished(self._discover_registered_nodes) + self._register.full_scan_finished(self._discover_registered_nodes) + await self._register.start() + self._subscribe_to_protocol_events() + await self._subscribe_to_node_events() + self._is_running = True + + async def discover_nodes(self, load: bool = True) -> bool: + """Discover nodes.""" + await self.discover_network_coordinator(load=load) + if not self._is_running: + await self.start() + await self._discover_registered_nodes() + if load: + return await self._load_discovered_nodes() + return True + + async def stop(self) -> None: + """Stop network discovery.""" + 
_LOGGER.debug("Stopping") + for task in self._discover_sed_tasks.values(): + if not task.done(): + task.cancel() + self._is_running = False + self._unsubscribe_to_protocol_events() + await self._unload_discovered_nodes() + await self._register.stop() + _LOGGER.debug("Stopping finished") + + # endregion + + async def allow_join_requests(self, state: bool) -> None: + """Enable or disable Plugwise network.""" + request = CirclePlusAllowJoiningRequest(self._controller.send, state) + response = await request.send() + if (response := await request.send()) is None: + raise NodeError("No response to get notifications for join request.") + if response.response_type != NodeResponseType.JOIN_ACCEPTED: + raise MessageError( + f"Unknown NodeResponseType '{response.response_type.name}' received" + ) + + def subscribe_to_node_events( + self, + node_event_callback: Callable[[NodeEvent, str], Coroutine[Any, Any, None]], + events: tuple[NodeEvent, ...], + ) -> Callable[[], None]: + """Subscribe callback when specified NodeEvent occurs. + + Returns the function to be called to unsubscribe later. + """ + + def remove_subscription() -> None: + """Remove stick event subscription.""" + self._node_event_subscribers.pop(remove_subscription) + + self._node_event_subscribers[remove_subscription] = ( + node_event_callback, + events, + ) + return remove_subscription + + async def _notify_node_event_subscribers(self, event: NodeEvent, mac: str) -> None: + """Call callback for node event subscribers.""" + callback_list: list[Coroutine[Any, Any, None]] = [] + for callback, events in self._node_event_subscribers.values(): + if event in events: + _LOGGER.debug("Publish %s for %s", event, mac) + callback_list.append(callback(event, mac)) + if len(callback_list) > 0: + await gather(*callback_list) diff --git a/plugwise_usb/network/cache.py b/plugwise_usb/network/cache.py new file mode 100644 index 000000000..a9fb1eda2 --- /dev/null +++ b/plugwise_usb/network/cache.py @@ -0,0 +1,83 @@ +"""Caching for plugwise network.""" + +from __future__ import annotations + +import logging + +from ..api import NodeType +from ..constants import CACHE_DATA_SEPARATOR +from ..helpers.cache import PlugwiseCache + +_LOGGER = logging.getLogger(__name__) +_NETWORK_CACHE_FILE_NAME = "nodes.cache" + + +class NetworkRegistrationCache(PlugwiseCache): + """Class to cache node network information.""" + + def __init__(self, cache_root_dir: str = "") -> None: + """Initialize NetworkCache class.""" + super().__init__(_NETWORK_CACHE_FILE_NAME, cache_root_dir) + self._registrations: dict[int, tuple[str, NodeType | None]] = {} + + @property + def registrations(self) -> dict[int, tuple[str, NodeType | None]]: + """Cached network information.""" + return self._registrations + + async def save_cache(self) -> None: + """Save the node information to file.""" + cache_data_to_save: dict[str, str] = {} + for address in range(-1, 64, 1): + mac, node_type = self._registrations.get(address, ("", None)) + if node_type is None: + node_value = "" + else: + node_value = str(node_type) + cache_data_to_save[str(address)] = f"{mac}{CACHE_DATA_SEPARATOR}{node_value}" + await self.write_cache(cache_data_to_save) + + async def clear_cache(self) -> None: + """Clear current cache.""" + self._registrations = {} + await self.delete_cache() + + async def restore_cache(self) -> None: + """Load the previously stored information.""" + data: dict[str, str] = await self.read_cache() + self._registrations = {} + for _key, _data in data.items(): + address = int(_key) + try: + if 
CACHE_DATA_SEPARATOR in _data: + values = _data.split(CACHE_DATA_SEPARATOR) + else: + # legacy data separator can by remove at next version + values = _data.split(";") + mac = values[0] + node_type: NodeType | None = None + if values[1] != "": + node_type = NodeType[values[1][9:]] + self._registrations[address] = (mac, node_type) + _LOGGER.debug( + "Restore registry address %s with mac %s with node type %s", + address, + mac if mac != "" else "", + str(node_type), + ) + except (KeyError, IndexError): + _LOGGER.warning( + "Skip invalid data '%s' in cache file '%s'", + _data, + self._cache_file, + ) + + def update_registration( + self, address: int, mac: str, node_type: NodeType | None + ) -> None: + """Save node information in cache.""" + if self._registrations.get(address) is not None: + _, current_node_type = self._registrations[address] + if current_node_type is not None and node_type is None: + return + self._registrations[address] = (mac, node_type) diff --git a/plugwise_usb/network/py.typed b/plugwise_usb/network/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/plugwise_usb/network/registry.py b/plugwise_usb/network/registry.py new file mode 100644 index 000000000..70f478298 --- /dev/null +++ b/plugwise_usb/network/registry.py @@ -0,0 +1,301 @@ +"""Register of network configuration.""" + +from __future__ import annotations + +from asyncio import Task, create_task, sleep +from collections.abc import Awaitable, Callable +from copy import deepcopy +import logging + +from ..api import NodeType +from ..constants import UTF8 +from ..exceptions import CacheError, NodeError +from ..helpers.util import validate_mac +from ..messages.requests import ( + CirclePlusScanRequest, + NodeAddRequest, + NodeRemoveRequest, + PlugwiseRequest, +) +from ..messages.responses import NodeResponseType, PlugwiseResponse +from .cache import NetworkRegistrationCache + +_LOGGER = logging.getLogger(__name__) + + +class StickNetworkRegister: + """Network register.""" + + def __init__( + self, + mac_network_controller: bytes, + send_fn: Callable[[PlugwiseRequest, bool], Awaitable[PlugwiseResponse | None]], + ) -> None: + """Initialize network register.""" + self._mac_nc = mac_network_controller + self._send_to_controller = send_fn + self._cache_folder: str = "" + self._cache_restored = False + self._cache_enabled = False + self._network_cache: NetworkRegistrationCache | None = None + self._loaded: bool = False + self._registry: dict[int, tuple[str, NodeType | None]] = {} + self._first_free_address: int = 65 + self._registration_task: Task[None] | None = None + self._quick_scan_finished: Callable[[], Awaitable[None]] | None = None + self._full_scan_finished: Callable[[], Awaitable[None]] | None = None + self._scan_completed = False + + # region Properties + + @property + def cache_enabled(self) -> bool: + """Return usage of cache.""" + return self._cache_enabled + + @cache_enabled.setter + def cache_enabled(self, enable: bool = True) -> None: + """Enable or disable usage of cache.""" + if enable and not self._cache_enabled: + _LOGGER.debug("Enable cache") + self._network_cache = NetworkRegistrationCache(self._cache_folder) + elif not enable and self._cache_enabled: + _LOGGER.debug("Disable cache") + self._cache_enabled = enable + + async def initialize_cache(self, create_root_folder: bool = False) -> None: + """Initialize cache.""" + if not self._cache_enabled or self._network_cache is None: + raise CacheError("Unable to initialize cache, enable cache first.") + await 
self._network_cache.initialize_cache(create_root_folder) + + @property + def cache_folder(self) -> str: + """Path to folder to store cached data.""" + return self._cache_folder + + @cache_folder.setter + def cache_folder(self, cache_folder: str) -> None: + """Set path to cache data.""" + if cache_folder == self._cache_folder: + return + self._cache_folder = cache_folder + if self._network_cache is not None: + self._network_cache.cache_root_directory = cache_folder + + @property + def registry(self) -> dict[int, tuple[str, NodeType | None]]: + """Return dictionary with all joined nodes.""" + return deepcopy(self._registry) + + @property + def scan_completed(self) -> bool: + """Indicate if scan is completed.""" + return self._scan_completed + + def quick_scan_finished(self, callback: Callable[[], Awaitable[None]]) -> None: + """Register method to be called when quick scan is finished.""" + self._quick_scan_finished = callback + + def full_scan_finished(self, callback: Callable[[], Awaitable[None]]) -> None: + """Register method to be called when full scan is finished.""" + self._full_scan_finished = callback + + # endregion + + async def start(self) -> None: + """Initialize load the network registry.""" + if self._cache_enabled: + await self.restore_network_cache() + await self.load_registry_from_cache() + await self.update_missing_registrations(quick=True) + + async def restore_network_cache(self) -> None: + """Restore previously saved cached network and node information.""" + if self._network_cache is None: + _LOGGER.error("Unable to restore cache when cache is not initialized") + return + if not self._cache_restored: + if not self._network_cache.initialized: + await self._network_cache.initialize_cache() + await self._network_cache.restore_cache() + self._cache_restored = True + + async def load_registry_from_cache(self) -> None: + """Load network registry from cache.""" + if self._network_cache is None: + _LOGGER.error( + "Unable to restore network registry because cache is not initialized" + ) + return + if self._cache_restored: + return + for address, registration in self._network_cache.registrations.items(): + mac, node_type = registration + if self._registry.get(address) is None: + self._registry[address] = (mac, node_type) + + async def retrieve_network_registration( + self, address: int, retry: bool = True + ) -> tuple[int, str] | None: + """Return the network mac registration of specified address.""" + request = CirclePlusScanRequest(self._send_to_controller, self._mac_nc, address) + if (response := await request.send()) is None: + if retry: + return await self.retrieve_network_registration(address, retry=False) + return None + address = response.network_address + mac_of_node = response.registered_mac + if (mac_of_node := response.registered_mac) == "FFFFFFFFFFFFFFFF": + mac_of_node = "" + return (address, mac_of_node) + + def network_address(self, mac: str) -> int | None: + """Return the network registration address for given mac.""" + for address, registration in self._registry.items(): + registered_mac, _ = registration + if mac == registered_mac: + return address + return None + + def network_controller(self) -> tuple[str, NodeType | None]: + """Return the registration for the network controller.""" + if self._registry.get(-1) is None: + raise NodeError("Unable to return network controller details") + return self.registry[-1] + + def update_network_registration( + self, address: int, mac: str, node_type: NodeType | None + ) -> None: + """Add a network registration.""" + if 
self._registry.get(address) is not None: + _, current_type = self._registry[address] + if current_type is not None and node_type is None: + return + self._registry[address] = (mac, node_type) + if self._network_cache is not None: + self._network_cache.update_registration(address, mac, node_type) + + async def update_missing_registrations(self, quick: bool = False) -> None: + """Retrieve all unknown network registrations from network controller.""" + for address in range(0, 64): + if self._registry.get(address) is not None and not quick: + mac, _ = self._registry[address] + if mac == "": + self._first_free_address = min(self._first_free_address, address) + continue + registration = await self.retrieve_network_registration(address, False) + if registration is not None: + address, mac = registration + if mac == "": + self._first_free_address = min(self._first_free_address, address) + if quick: + break + _LOGGER.debug( + "Network registration at address %s is %s", + str(address), + "'empty'" if mac == "" else f"set to {mac}", + ) + self.update_network_registration(address, mac, None) + await sleep(0.1) + if not quick: + await sleep(10) + if quick: + if self._registration_task is None or self._registration_task.done(): + self._registration_task = create_task( + self.update_missing_registrations(quick=False) + ) + if self._quick_scan_finished is not None: + await self._quick_scan_finished() + self._quick_scan_finished = None + _LOGGER.info("Quick network registration discovery finished") + else: + _LOGGER.debug("Full network registration finished") + self._scan_completed = True + if self._cache_enabled: + _LOGGER.debug("Full network registration finished, save to cache") + await self.save_registry_to_cache() + _LOGGER.debug("Full network registration finished, post") + _LOGGER.info("Full network discovery completed") + if self._full_scan_finished is not None: + await self._full_scan_finished() + self._full_scan_finished = None + + def _stop_registration_task(self) -> None: + """Stop the background registration task.""" + if self._registration_task is None: + return + self._registration_task.cancel() + + async def save_registry_to_cache(self) -> None: + """Save network registry to cache.""" + if self._network_cache is None: + _LOGGER.error( + "Unable to save network registry because cache is not initialized" + ) + return + _LOGGER.debug( + "save_registry_to_cache starting for %s items", str(len(self._registry)) + ) + for address, registration in self._registry.items(): + mac, node_type = registration + self._network_cache.update_registration(address, mac, node_type) + await self._network_cache.save_cache() + _LOGGER.debug("save_registry_to_cache finished") + + async def register_node(self, mac: str) -> int: + """Register node to Plugwise network and return network address.""" + if not validate_mac(mac): + raise NodeError(f"Invalid mac '{mac}' to register") + + request = NodeAddRequest(self._send_to_controller, bytes(mac, UTF8), True) + response = await request.send() + if response is None or response.ack_id != NodeResponseType.JOIN_ACCEPTED: + raise NodeError(f"Failed to register node {mac}") + self.update_network_registration(self._first_free_address, mac, None) + self._first_free_address += 1 + return self._first_free_address - 1 + + async def unregister_node(self, mac: str) -> None: + """Unregister node from current Plugwise network.""" + if not validate_mac(mac): + raise NodeError(f"Invalid mac '{mac}' to unregister") + + mac_registered = False + for registration in self._registry.values(): + 
if mac == registration[0]: + mac_registered = True + break + if not mac_registered: + raise NodeError(f"No existing registration '{mac}' found to unregister") + + request = NodeRemoveRequest(self._send_to_controller, self._mac_nc, mac) + response = await request.send() + if (response := await request.send()) is None: + raise NodeError( + f"The Zigbee network coordinator '{self._mac_nc!r}'" + + f" did not respond to unregister node '{mac}'" + ) + if response.status.value != 1: + raise NodeError( + f"The Zigbee network coordinator '{self._mac_nc!r}'" + + f" failed to unregister node '{mac}'" + ) + if (address := self.network_address(mac)) is not None: + self.update_network_registration(address, mac, None) + + async def clear_register_cache(self) -> None: + """Clear current cache.""" + if self._network_cache is not None: + await self._network_cache.clear_cache() + self._cache_restored = False + + async def stop(self) -> None: + """Unload the network registry.""" + self._stop_registration_task() + if ( + self._cache_enabled + and self._network_cache is not None + and self._network_cache.initialized + ): + await self.save_registry_to_cache() diff --git a/plugwise_usb/nodes/__init__.py b/plugwise_usb/nodes/__init__.py index 8aac298cf..0258e1b81 100644 --- a/plugwise_usb/nodes/__init__.py +++ b/plugwise_usb/nodes/__init__.py @@ -1,295 +1,68 @@ -"""Plugwise nodes.""" -from datetime import datetime -import logging - -from ..constants import ( - FEATURE_AVAILABLE, - FEATURE_PING, - FEATURE_RELAY, - FEATURE_RSSI_IN, - FEATURE_RSSI_OUT, - PRIORITY_LOW, - UTF8_DECODE, -) -from ..messages.requests import NodeFeaturesRequest, NodeInfoRequest, NodePingRequest -from ..messages.responses import ( - NodeFeaturesResponse, - NodeInfoResponse, - NodeJoinAckResponse, - NodePingResponse, -) -from ..util import validate_mac, version_to_model - -_LOGGER = logging.getLogger(__name__) - - -class PlugwiseNode: - """Base class for a Plugwise node.""" - - def __init__(self, mac, address, message_sender): - mac = mac.upper() - if not validate_mac(mac): - _LOGGER.warning( - "MAC address is in unexpected format: %s", - str(mac), - ) - self._mac = bytes(mac, encoding=UTF8_DECODE) - self.message_sender = message_sender - self._features = () - self._address = address - self._callbacks = {} - self._last_update = None - self._available = False - self._battery_powered = False - self._measures_power = False - self._rssi_in = None - self._rssi_out = None - self._ping = None - self._node_type = None - self._hardware_version = None - self._firmware_version = None - self._relay_state = False - self._last_log_address = None - self._device_features = None - - @property - def available(self) -> bool: - """Current network state of plugwise node.""" - return self._available - - @available.setter - def available(self, state: bool): - """Set current network availability state of plugwise node.""" - if state: - if not self._available: - self._available = True - _LOGGER.info( - "Marking node %s available", - self.mac, - ) - self.do_callback(FEATURE_AVAILABLE["id"]) - else: - if self._available: - self._available = False - _LOGGER.info( - "Marking node %s unavailable", - self.mac, - ) - self.do_callback(FEATURE_AVAILABLE["id"]) - - @property - def battery_powered(self) -> bool: - """Return True if node is a SED (battery powered) device.""" - return self._battery_powered - - @property - def hardware_model(self) -> str: - """Return hardware model.""" - if self._hardware_version: - return version_to_model(self._hardware_version) - return None - - 
@property - def hardware_version(self) -> str: - """Return hardware version.""" - if self._hardware_version is not None: - return self._hardware_version - return "Unknown" - - @property - def features(self) -> tuple: - """Return the abstracted features supported by this plugwise device.""" - return self._features - - @property - def firmware_version(self) -> str: - """Return firmware version.""" - if self._firmware_version is not None: - return str(self._firmware_version) - return "Unknown" - - @property - def last_update(self) -> datetime: - """Return datetime of last received update.""" - return self._last_update - - @property - def mac(self) -> str: - """Return the MAC address in string.""" - return self._mac.decode(UTF8_DECODE) - - @property - def measures_power(self) -> bool: - """Return True if node can measure power usage.""" - return self._measures_power - - @property - def name(self) -> str: - """Return unique name.""" - return self.hardware_model + " (" + str(self._address) + ")" - - @property - def ping(self) -> int: - """Return ping roundtrip in ms.""" - if self._ping is not None: - return self._ping - return 0 - - @property - def rssi_in(self) -> int: - """Return inbound RSSI level.""" - if self._rssi_in is not None: - return self._rssi_in - return 0 - - @property - def rssi_out(self) -> int: - """Return outbound RSSI level, based on inbound RSSI level of neighbor node.""" - if self._rssi_out is not None: - return self._rssi_out - return 0 - - def do_ping(self, callback=None): - """Send network ping message to node.""" - self._request_ping(callback, True) - - def _request_info(self, callback=None): - """Request info from node.""" - self.message_sender( - NodeInfoRequest(self._mac), - callback, - 0, - PRIORITY_LOW, +"""Plugwise node devices.""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable + +from ..api import NodeEvent, NodeType, PlugwiseNode +from ..connection import StickController +from .circle import PlugwiseCircle +from .circle_plus import PlugwiseCirclePlus +from .scan import PlugwiseScan +from .sense import PlugwiseSense +from .stealth import PlugwiseStealth +from .switch import PlugwiseSwitch + + +def get_plugwise_node( + mac: str, + address: int, + controller: StickController, + loaded_callback: Callable[[NodeEvent, str], Awaitable[None]], + node_type: NodeType, +) -> PlugwiseNode | None: + """Return an initialized plugwise node class based on given the node type.""" + + if node_type == NodeType.CIRCLE_PLUS: + return PlugwiseCirclePlus( + mac, + address, + controller, + loaded_callback, ) - - def _request_features(self, callback=None): - """Request supported features for this node.""" - self.message_sender( - NodeFeaturesRequest(self._mac), - callback, + if node_type == NodeType.CIRCLE: + return PlugwiseCircle( + mac, + address, + controller, + loaded_callback, ) - - def _request_ping(self, callback=None, ignore_sensor=True): - """Ping node.""" - if ignore_sensor or FEATURE_PING["id"] in self._callbacks: - self.message_sender( - NodePingRequest(self._mac), - callback, - ) - - def message_for_node(self, message): - """Process received message.""" - if message.mac == self._mac: - if message.timestamp is not None: - _LOGGER.debug( - "Previous update %s of node %s, last message %s", - str(self._last_update), - self.mac, - str(message.timestamp), - ) - self._last_update = message.timestamp - if not self._available: - self.available = True - self._request_info() - if isinstance(message, NodePingResponse): - 
self._process_ping_response(message) - elif isinstance(message, NodeInfoResponse): - self._process_info_response(message) - elif isinstance(message, NodeFeaturesResponse): - self._process_features_response(message) - elif isinstance(message, NodeJoinAckResponse): - self._process_join_ack_response(message) - else: - self.message_for_circle(message) - self.message_for_sed(message) - else: - _LOGGER.debug( - "Skip message, mac of node (%s) != mac at message (%s)", - message.mac.decode(UTF8_DECODE), - self.mac, - ) - - def message_for_circle(self, message): - """Pass messages to PlugwiseCircle class""" - - def message_for_sed(self, message): - """Pass messages to NodeSED class""" - - def subscribe_callback(self, callback, sensor) -> bool: - """Subscribe callback to execute when state change happens.""" - if sensor in self._features: - if sensor not in self._callbacks: - self._callbacks[sensor] = [] - self._callbacks[sensor].append(callback) - return True - return False - - def unsubscribe_callback(self, callback, sensor): - """Unsubscribe callback to execute when state change happens.""" - if sensor in self._callbacks: - self._callbacks[sensor].remove(callback) - - def do_callback(self, sensor): - """Execute callbacks registered for specified callback type.""" - if sensor in self._callbacks: - for callback in self._callbacks[sensor]: - try: - callback(None) - # TODO: narrow exception - except Exception as err: # pylint: disable=broad-except - _LOGGER.error( - "Error while executing all callback : %s", - err, - ) - - def _process_join_ack_response(self, message): - """Process join acknowledge response message""" - _LOGGER.info( - "Node %s has (re)joined plugwise network", - self.mac, + if node_type == NodeType.SWITCH: + return PlugwiseSwitch( + mac, + address, + controller, + loaded_callback, ) - - def _process_ping_response(self, message): - """Process ping response message.""" - if self._rssi_in != message.rssi_in.value: - self._rssi_in = message.rssi_in.value - self.do_callback(FEATURE_RSSI_IN["id"]) - if self._rssi_out != message.rssi_out.value: - self._rssi_out = message.rssi_out.value - self.do_callback(FEATURE_RSSI_OUT["id"]) - if self._ping != message.ping_ms.value: - self._ping = message.ping_ms.value - self.do_callback(FEATURE_PING["id"]) - - def _process_info_response(self, message): - """Process info response message.""" - _LOGGER.debug( - "Response info message for node %s, last log address %s", - self.mac, - str(message.last_logaddr.value), + if node_type == NodeType.SENSE: + return PlugwiseSense( + mac, + address, + controller, + loaded_callback, ) - if message.relay_state.serialize() == b"01": - if not self._relay_state: - self._relay_state = True - self.do_callback(FEATURE_RELAY["id"]) - else: - if self._relay_state: - self._relay_state = False - self.do_callback(FEATURE_RELAY["id"]) - self._hardware_version = message.hw_ver.value.decode(UTF8_DECODE) - self._firmware_version = message.fw_ver.value - self._node_type = message.node_type.value - if self._last_log_address != message.last_logaddr.value: - self._last_log_address = message.last_logaddr.value - _LOGGER.debug("Node type = %s", self.hardware_model) - if not self._battery_powered: - _LOGGER.debug("Relay state = %s", str(self._relay_state)) - _LOGGER.debug("Hardware version = %s", str(self._hardware_version)) - _LOGGER.debug("Firmware version = %s", str(self._firmware_version)) - - def _process_features_response(self, message): - """Process features message.""" - _LOGGER.warning( - "Node %s supports features %s", self.mac, 
str(message.features.value) + if node_type == NodeType.SCAN: + return PlugwiseScan( + mac, + address, + controller, + loaded_callback, + ) + if node_type == NodeType.STEALTH: + return PlugwiseStealth( + mac, + address, + controller, + loaded_callback, ) - self._device_features = message.features.value + return None diff --git a/plugwise_usb/nodes/celsius.py b/plugwise_usb/nodes/celsius.py new file mode 100644 index 000000000..815c0f059 --- /dev/null +++ b/plugwise_usb/nodes/celsius.py @@ -0,0 +1,48 @@ +"""Plugwise Celsius node. + +TODO: Finish node +""" +from __future__ import annotations + +import logging +from typing import Final + +from ..api import NodeEvent, NodeFeature +from ..nodes.sed import NodeSED +from .helpers.firmware import CELSIUS_FIRMWARE_SUPPORT + +_LOGGER = logging.getLogger(__name__) + +CELSIUS_FEATURES: Final = ( + NodeFeature.INFO, + NodeFeature.TEMPERATURE, + NodeFeature.HUMIDITY, +) + + +class PlugwiseCelsius(NodeSED): + """provides interface to the Plugwise Celsius nodes.""" + + async def load(self) -> bool: + """Load and activate node features.""" + if self._loaded: + return True + self._node_info.is_battery_powered = True + + if self._cache_enabled: + _LOGGER.debug( + "Load Celsius node %s from cache", self._node_info.mac + ) + if await self._load_from_cache(): + pass + + self._loaded = True + self._setup_protocol( + CELSIUS_FIRMWARE_SUPPORT, + (NodeFeature.INFO, NodeFeature.TEMPERATURE), + ) + if await self.initialize(): + await self._loaded_callback(NodeEvent.LOADED, self.mac) + return True + _LOGGER.debug("Load of Celsius node %s failed", self._node_info.mac) + return False diff --git a/plugwise_usb/nodes/circle.py b/plugwise_usb/nodes/circle.py index 97081bd4b..5cdedb24f 100644 --- a/plugwise_usb/nodes/circle.py +++ b/plugwise_usb/nodes/circle.py @@ -1,882 +1,1133 @@ -"""Plugwise Circle node object.""" -from datetime import datetime, timedelta +"""Plugwise Circle node class.""" + +from __future__ import annotations + +from asyncio import Task, create_task, gather +from collections.abc import Awaitable, Callable +from dataclasses import replace +from datetime import UTC, datetime +from functools import wraps import logging +from typing import Any, TypeVar, cast +from ..api import ( + EnergyStatistics, + NodeEvent, + NodeFeature, + NodeInfo, + NodeType, + PowerStatistics, + RelayConfig, + RelayState, +) +from ..connection import StickController from ..constants import ( - FEATURE_ENERGY_CONSUMPTION_TODAY, - FEATURE_PING, - FEATURE_POWER_CONSUMPTION_CURRENT_HOUR, - FEATURE_POWER_CONSUMPTION_PREVIOUS_HOUR, - FEATURE_POWER_CONSUMPTION_TODAY, - FEATURE_POWER_CONSUMPTION_YESTERDAY, - FEATURE_POWER_PRODUCTION_CURRENT_HOUR, - FEATURE_POWER_USE, - FEATURE_POWER_USE_LAST_8_SEC, - FEATURE_RELAY, - FEATURE_RSSI_IN, - FEATURE_RSSI_OUT, MAX_TIME_DRIFT, - MESSAGE_TIME_OUT, - PRIORITY_HIGH, - PRIORITY_LOW, + MINIMAL_POWER_UPDATE, PULSES_PER_KW_SECOND, - RELAY_SWITCHED_OFF, - RELAY_SWITCHED_ON, + SECOND_IN_NANOSECONDS, ) +from ..exceptions import FeatureError, NodeError from ..messages.requests import ( - CircleCalibrationRequest, CircleClockGetRequest, CircleClockSetRequest, - CircleEnergyCountersRequest, + CircleEnergyLogsRequest, CirclePowerUsageRequest, - CircleSwitchRelayRequest, + CircleRelayInitStateRequest, + CircleRelaySwitchRequest, + EnergyCalibrationRequest, + NodeInfoRequest, ) -from ..messages.responses import ( - CircleCalibrationResponse, - CircleClockResponse, - CircleEnergyCountersResponse, - CirclePowerUsageResponse, - NodeAckLargeResponse, -) -from ..nodes 
import PlugwiseNode +from ..messages.responses import NodeInfoResponse, NodeResponse, NodeResponseType +from .helpers import EnergyCalibration, raise_not_loaded +from .helpers.counter import EnergyCounters +from .helpers.firmware import CIRCLE_FIRMWARE_SUPPORT +from .helpers.pulses import PulseLogRecord, calc_log_address +from .node import PlugwiseBaseNode + +CACHE_CURRENT_LOG_ADDRESS = "current_log_address" +CACHE_CALIBRATION_GAIN_A = "calibration_gain_a" +CACHE_CALIBRATION_GAIN_B = "calibration_gain_b" +CACHE_CALIBRATION_NOISE = "calibration_noise" +CACHE_CALIBRATION_TOT = "calibration_tot" +CACHE_ENERGY_COLLECTION = "energy_collection" +CACHE_RELAY = "relay" +CACHE_RELAY_INIT = "relay_init" +FuncT = TypeVar("FuncT", bound=Callable[..., Any]) _LOGGER = logging.getLogger(__name__) -class PlugwiseCircle(PlugwiseNode): - """provides interface to the Plugwise Circle nodes and base class for Circle+ nodes""" - - def __init__(self, mac, address, message_sender): - super().__init__(mac, address, message_sender) - self._features = ( - FEATURE_ENERGY_CONSUMPTION_TODAY["id"], - FEATURE_PING["id"], - FEATURE_POWER_USE["id"], - FEATURE_POWER_USE_LAST_8_SEC["id"], - FEATURE_POWER_CONSUMPTION_CURRENT_HOUR["id"], - FEATURE_POWER_CONSUMPTION_PREVIOUS_HOUR["id"], - FEATURE_POWER_CONSUMPTION_TODAY["id"], - FEATURE_POWER_CONSUMPTION_YESTERDAY["id"], - FEATURE_POWER_PRODUCTION_CURRENT_HOUR["id"], - # FEATURE_POWER_PRODUCTION_PREVIOUS_HOUR["id"], - FEATURE_RSSI_IN["id"], - FEATURE_RSSI_OUT["id"], - FEATURE_RELAY["id"], - ) - self._last_collected_address = None - self._last_collected_address_slot = 0 - self._last_collected_address_timestamp = datetime(2000, 1, 1) - self._energy_consumption_today_reset = datetime.now().replace( - hour=0, minute=0, second=0, microsecond=0 - ) - self._energy_memory = {} - self._energy_history = {} - self._energy_history_failed_address = [] - self._energy_last_collected_timestamp = datetime(2000, 1, 1) - self._energy_last_collected_count = 0 - self._energy_ratelimit_collection_timestamp = datetime.utcnow() - self._energy_last_rollover_timestamp = datetime.utcnow() - self._energy_pulses_midnight_rollover = datetime.utcnow() - self._energy_last_local_hour = datetime.now().hour - self._energy_last_populated_slot = 0 - self._energy_pulses_current_hour = None - self._energy_pulses_prev_hour = None - self._energy_pulses_today_hourly = None - self._energy_pulses_today_now = None - self._energy_pulses_yesterday = None - self._new_relay_state = False - self._new_relay_stamp = datetime.now() - timedelta(seconds=MESSAGE_TIME_OUT) - self._pulses_1s = None - self._pulses_8s = None - self._pulses_produced_1h = None - self.calibration = False - self._gain_a = None - self._gain_b = None - self._off_noise = None - self._off_tot = None - self._measures_power = True - self._last_log_collected = False - self.timezone_delta = datetime.now().replace( - minute=0, second=0, microsecond=0 - ) - datetime.utcnow().replace(minute=0, second=0, microsecond=0) - self._clock_offset = None - self._last_clock_sync_day = datetime.now().day - self.get_clock(self.sync_clock) - self._request_calibration() +def raise_calibration_missing(func: FuncT) -> FuncT: + """Validate energy calibration settings are available.""" - @property - def current_power_usage(self): - """Returns power usage during the last second in Watts - Based on last received power usage information - """ - if self._pulses_1s is not None: - return self.pulses_to_kws(self._pulses_1s) * 1000 - return None + @wraps(func) + def decorated(*args: Any, 
**kwargs: Any) -> Any:
+        if not args[0].calibrated:
+            raise NodeError("Energy calibration settings are missing")
+        return func(*args, **kwargs)

-    @property
-    def current_power_usage_8_sec(self):
-        """Returns power usage during the last 8 second in Watts
-        Based on last received power usage information
-        """
-        if self._pulses_8s is not None:
-            return self.pulses_to_kws(self._pulses_8s, 8) * 1000
-        return None
+    return cast(FuncT, decorated)
+
+
+class PlugwiseCircle(PlugwiseBaseNode):
+    """Plugwise Circle node."""
+
+    def __init__(
+        self,
+        mac: str,
+        address: int,
+        controller: StickController,
+        loaded_callback: Callable[[NodeEvent, str], Awaitable[None]],
+    ):
+        """Initialize Plugwise Circle node."""
+        super().__init__(mac, address, controller, loaded_callback)
+
+        # Relay
+        self._relay_state: RelayState = RelayState()
+        self._relay_config: RelayConfig = RelayConfig()
+
+        # Power
+        self._power: PowerStatistics = PowerStatistics()
+        self._calibration: EnergyCalibration | None = None
+
+        # Energy
+        self._energy_counters = EnergyCounters(mac)
+        self._retrieve_energy_logs_task: None | Task[None] = None
+        self._last_energy_log_requested: bool = False
+
+        self._group_member: list[int] = []
+
+    # region Properties

     @property
-    def energy_consumption_today(self) -> float:
-        """Returns total energy consumption since midnight in kWh"""
-        if self._energy_pulses_today_now is not None:
-            return self.pulses_to_kws(self._energy_pulses_today_now, 3600)
-        return None
+    def calibrated(self) -> bool:
+        """State of calibration."""
+        if self._calibration is not None:
+            return True
+        return False

     @property
-    def energy_consumption_today_last_reset(self):
-        """Last reset of total energy consumption today"""
-        return self._energy_consumption_today_reset
+    def energy(self) -> EnergyStatistics:
+        """Energy statistics."""
+        return self._energy_counters.energy_statistics

     @property
-    def power_consumption_current_hour(self):
-        """Returns the power usage during this running hour in kWh
-        Based on last received power usage information
-        """
-        if self._energy_pulses_current_hour is not None:
-            return self.pulses_to_kws(self._energy_pulses_current_hour, 3600)
-        return None
+    @raise_not_loaded
+    def energy_consumption_interval(self) -> int | None:
+        """Interval (minutes) energy consumption counters are locally logged at Circle devices."""
+        if NodeFeature.ENERGY not in self._features:
+            raise NodeError(f"Energy log interval is not supported for node {self.mac}")
+        return self._energy_counters.consumption_interval

     @property
-    def power_consumption_previous_hour(self):
-        """Returns power consumption during the previous hour in kWh"""
-        if self._energy_pulses_prev_hour is not None:
-            return self.pulses_to_kws(self._energy_pulses_prev_hour, 3600)
-        return None
+    def energy_production_interval(self) -> int | None:
+        """Interval (minutes) energy production counters are locally logged at Circle devices."""
+        if NodeFeature.ENERGY not in self._features:
+            raise NodeError(f"Energy log interval is not supported for node {self.mac}")
+        return self._energy_counters.production_interval

     @property
-    def power_consumption_today(self):
-        """Total power consumption during today in kWh"""
-        if self._energy_pulses_today_hourly is not None:
-            return self.pulses_to_kws(self._energy_pulses_today_hourly, 3600)
-        return None
+    @raise_not_loaded
+    def power(self) -> PowerStatistics:
+        """Power statistics."""
+        return self._power

     @property
-    def power_consumption_yesterday(self):
-        """Total power consumption of yesterday in
kWh""" - if self._energy_pulses_yesterday is not None: - return self.pulses_to_kws(self._energy_pulses_yesterday, 3600) - return None + @raise_not_loaded + def relay(self) -> bool: + """Current value of relay.""" + return bool(self._relay_state.state) @property - def power_production_current_hour(self): - """Returns the power production during this running hour in kWh - Based on last received power usage information - """ - if self._pulses_produced_1h is not None: - return self.pulses_to_kws(self._pulses_produced_1h, 3600) - return None + @raise_not_loaded + def relay_config(self) -> RelayConfig: + """Configuration state of relay.""" + if NodeFeature.RELAY_INIT not in self._features: + raise FeatureError( + f"Configuration of relay is not supported for device {self.name}" + ) + return self._relay_config @property - def relay_state(self) -> bool: - """Return last known relay state or the new switch state by anticipating - the acknowledge for new state is getting in before message timeout. - """ - if self._new_relay_stamp + timedelta(seconds=MESSAGE_TIME_OUT) > datetime.now(): - return self._new_relay_state + @raise_not_loaded + def relay_state(self) -> RelayState: + """State of relay.""" return self._relay_state - @relay_state.setter - def relay_state(self, state): - """Request the relay to switch state.""" - self._request_switch(state) - self._new_relay_state = state - self._new_relay_stamp = datetime.now() - if state != self._relay_state: - self.do_callback(FEATURE_RELAY["id"]) - - def _request_calibration(self, callback=None): - """Request calibration info""" - self.message_sender( - CircleCalibrationRequest(self._mac), - callback, - 0, - PRIORITY_HIGH, + @raise_not_loaded + async def relay_off(self) -> None: + """Switch relay off.""" + await self.set_relay(False) + + @raise_not_loaded + async def relay_on(self) -> None: + """Switch relay on.""" + await self.set_relay(True) + + @raise_not_loaded + async def relay_init_off(self) -> None: + """Switch relay off.""" + await self._relay_init_set(False) + + @raise_not_loaded + async def relay_init_on(self) -> None: + """Switch relay on.""" + await self._relay_init_set(True) + + # endregion + + async def calibration_update(self) -> bool: + """Retrieve and update calibration settings. 
Returns True if successful.""" + _LOGGER.debug( + "Start updating energy calibration for %s", + self._mac_in_str, + ) + request = EnergyCalibrationRequest(self._send, self._mac_in_bytes) + if (calibration_response := await request.send()) is None: + _LOGGER.warning( + "Retrieving energy calibration information for %s failed", + self.name, + ) + await self._available_update_state(False) + return False + await self._available_update_state(True, calibration_response.timestamp) + await self._calibration_update_state( + calibration_response.gain_a, + calibration_response.gain_b, + calibration_response.off_noise, + calibration_response.off_tot, + ) + _LOGGER.debug( + "Updating energy calibration for %s succeeded", + self._mac_in_str, + ) + return True + + async def _calibration_load_from_cache(self) -> bool: + """Load calibration settings from cache.""" + cal_gain_a: float | None = None + cal_gain_b: float | None = None + cal_noise: float | None = None + cal_tot: float | None = None + if (gain_a := self._get_cache(CACHE_CALIBRATION_GAIN_A)) is not None: + cal_gain_a = float(gain_a) + if (gain_b := self._get_cache(CACHE_CALIBRATION_GAIN_B)) is not None: + cal_gain_b = float(gain_b) + if (noise := self._get_cache(CACHE_CALIBRATION_NOISE)) is not None: + cal_noise = float(noise) + if (tot := self._get_cache(CACHE_CALIBRATION_TOT)) is not None: + cal_tot = float(tot) + + # Restore calibration + result = await self._calibration_update_state( + cal_gain_a, + cal_gain_b, + cal_noise, + cal_tot, ) + if result: + _LOGGER.debug( + "Restore calibration settings from cache for %s was successful", + self._mac_in_str, + ) + return True + _LOGGER.info( + "Failed to restore calibration settings from cache for %s", self.name + ) + return False - def _request_switch(self, state, callback=None): - """Request to switch relay state and request state info""" - self.message_sender( - CircleSwitchRelayRequest(self._mac, state), - callback, - 0, - PRIORITY_HIGH, + async def _calibration_update_state( + self, + gain_a: float | None, + gain_b: float | None, + off_noise: float | None, + off_tot: float | None, + ) -> bool: + """Process new energy calibration settings. 
Returns True if successful.""" + if gain_a is None or gain_b is None or off_noise is None or off_tot is None: + return False + self._calibration = EnergyCalibration( + gain_a=gain_a, gain_b=gain_b, off_noise=off_noise, off_tot=off_tot ) + # Forward calibration config to energy collection + self._energy_counters.calibration = self._calibration - def request_power_update(self, callback=None): - """Request power usage and update energy counters""" - if self._available: - self.message_sender( - CirclePowerUsageRequest(self._mac), - callback, - ) - _timestamp_utcnow = datetime.utcnow() - # Request new energy counters if last one is more than one hour ago - if self._energy_last_collected_timestamp < _timestamp_utcnow.replace( - minute=0, second=0, microsecond=0 - ): - _LOGGER.info( - "Queue _last_log_address for %s at %s last_collected %s", - str(self.mac), - str(self._last_log_address), - self._energy_last_collected_timestamp, - ) - self._request_info(self.push_last_log_address) + if self._cache_enabled: + self._set_cache(CACHE_CALIBRATION_GAIN_A, gain_a) + self._set_cache(CACHE_CALIBRATION_GAIN_B, gain_b) + self._set_cache(CACHE_CALIBRATION_NOISE, off_noise) + self._set_cache(CACHE_CALIBRATION_TOT, off_tot) + await self.save_cache() + return True + + @raise_calibration_missing + async def power_update(self) -> PowerStatistics | None: + """Update the current power usage statistics. + + Return power usage or None if retrieval failed + """ + # Debounce power + if self.skip_update(self._power, MINIMAL_POWER_UPDATE): + return self._power - if len(self._energy_history_failed_address) > 0: - _mem_address = self._energy_history_failed_address.pop(0) - if self._energy_memory.get(_mem_address, 0) < 4: + request = CirclePowerUsageRequest(self._send, self._mac_in_bytes) + response = await request.send() + if response is None or response.timestamp is None: + _LOGGER.debug( + "No response for async_power_update() for %s", self._mac_in_str + ) + await self._available_update_state(False) + return None + await self._available_update_state(True, response.timestamp) + + # Update power stats + self._power.last_second = self._calc_watts( + response.pulse_1s, 1, response.offset + ) + self._power.last_8_seconds = self._calc_watts( + response.pulse_8s, 8, response.offset + ) + self._power.timestamp = response.timestamp + await self.publish_feature_update_to_subscribers(NodeFeature.POWER, self._power) + + # Forward pulse interval counters to pulse Collection + self._energy_counters.add_pulse_stats( + response.consumed_counter, + response.produced_counter, + response.timestamp, + ) + await self.publish_feature_update_to_subscribers( + NodeFeature.ENERGY, self._energy_counters.energy_statistics + ) + return self._power + + @raise_not_loaded + @raise_calibration_missing + async def energy_update(self) -> EnergyStatistics | None: + """Return updated energy usage statistics.""" + if self._current_log_address is None: + _LOGGER.debug( + "Unable to update energy logs for node %s because last_log_address is unknown.", + self._mac_in_str, + ) + if await self.node_info_update() is None: + if ( + self._initialization_delay_expired is not None + and datetime.now(tz=UTC) < self._initialization_delay_expired + ): _LOGGER.info( - "Collect EnergyCounters for %s at %s", - str(self.mac), - str(_mem_address), + "Unable to return energy statistics for %s during initialization, because it is not responding", + self.name, ) - self.request_energy_counters(_mem_address) - self._energy_ratelimit_collection_timestamp = _timestamp_utcnow else: 
+ _LOGGER.warning( + "Unable to return energy statistics for %s, because it is not responding", + self.name, + ) + return None + + # request node info update every 30 minutes. + elif not self.skip_update(self._node_info, 1800): + if await self.node_info_update() is None: + if ( + self._initialization_delay_expired is not None + and datetime.now(tz=UTC) < self._initialization_delay_expired + ): _LOGGER.info( - "Drop known request_energy_counters for %s at %s and clock sync", - str(self.mac), - str(_mem_address), + "Unable to return energy statistics for %s during initialization, because it is not responding", + self.name, ) - self.get_clock(self.sync_clock) - if datetime.now().day != self._last_clock_sync_day: - self._last_clock_sync_day = datetime.now().day - self.get_clock(self.sync_clock) - - def push_last_log_address(self): - if self._energy_history_failed_address.count(self._last_log_address) == 0: - self._energy_history_failed_address.append(self._last_log_address) - - def message_for_circle(self, message): - """Process received message""" - if isinstance(message, CirclePowerUsageResponse): - if self.calibration: - self._response_power_usage(message) + else: + _LOGGER.warning( + "Unable to return energy statistics for %s, because it is not responding", + self.name, + ) + return None + + # Always request last energy log records at initial startup + if not self._last_energy_log_requested: + self._last_energy_log_requested = await self.energy_log_update( + self._current_log_address + ) + + if self._energy_counters.log_rollover: + if await self.node_info_update() is None: _LOGGER.debug( - "Power update for %s, last update %s", - str(self.mac), - str(self._last_update), + "async_energy_update | %s | Log rollover | node_info_update failed", + self._mac_in_str, ) - else: - _LOGGER.info( - "Received power update for %s before calibration information is known", - str(self.mac), - ) - self._request_calibration(self.request_power_update) - elif isinstance(message, NodeAckLargeResponse): - self._node_ack_response(message) - elif isinstance(message, CircleCalibrationResponse): - self._response_calibration(message) - elif isinstance(message, CircleEnergyCountersResponse): - if self.calibration: - self._response_energy_counters(message) - else: + return None + + if not await self.energy_log_update(self._current_log_address): _LOGGER.debug( - "Received power buffer log for %s before calibration information is known", - str(self.mac), + "async_energy_update | %s | Log rollover | energy_log_update failed", + self._mac_in_str, ) - self._request_calibration(self.request_energy_counters) - elif isinstance(message, CircleClockResponse): - self._response_clock(message) - else: - self.message_for_circle_plus(message) - - def message_for_circle_plus(self, message): - """Pass messages to PlugwiseCirclePlus class""" + return None - def _node_ack_response(self, message): - """Process switch response message""" - if message.ack_id == RELAY_SWITCHED_ON: - if not self._relay_state: - _LOGGER.debug( - "Switch relay on for %s", - str(self.mac), + if ( + self._energy_counters.log_rollover + and self._current_log_address is not None + ): + # Retry with previous log address as Circle node pointer to self._current_log_address + # could be rolled over while the last log is at previous address/slot + _prev_log_address, _ = calc_log_address( + self._current_log_address, 1, -4 ) - self._relay_state = True - self.do_callback(FEATURE_RELAY["id"]) - elif message.ack_id == RELAY_SWITCHED_OFF: - if self._relay_state: + if not 
await self.energy_log_update(_prev_log_address): + _LOGGER.debug( + "async_energy_update | %s | Log rollover | energy_log_update %s failed", + self._mac_in_str, + _prev_log_address, + ) + return None + + if ( + missing_addresses := self._energy_counters.log_addresses_missing + ) is not None: + if len(missing_addresses) == 0: + await self.power_update() _LOGGER.debug( - "Switch relay off for %s", - str(self.mac), + "async_energy_update for %s | no missing log records", + self._mac_in_str, ) - self._relay_state = False - self.do_callback(FEATURE_RELAY["id"]) - else: - _LOGGER.debug( - "Unmanaged _node_ack_response %s received for %s", - str(message.ack_id), - str(self.mac), - ) - - def _response_power_usage(self, message: CirclePowerUsageResponse): - # Sometimes the circle returns -1 for some of the pulse counters - # likely this means the circle measures very little power and is suffering from - # rounding errors. Zero these out. However, negative pulse values are valid - # for power producing appliances, like solar panels, so don't complain too loudly. + return self._energy_counters.energy_statistics + if len(missing_addresses) == 1: + if await self.energy_log_update(missing_addresses[0]): + await self.power_update() + _LOGGER.debug( + "async_energy_update for %s | single energy log is missing | %s", + self._mac_in_str, + missing_addresses, + ) + return self._energy_counters.energy_statistics - # Power consumption last second - if message.pulse_1s.value == -1: - message.pulse_1s.value = 0 + # Create task to request remaining missing logs + if ( + self._retrieve_energy_logs_task is None + or self._retrieve_energy_logs_task.done() + ): _LOGGER.debug( - "1 sec power pulse counter for node %s has value of -1, corrected to 0", - str(self.mac), - ) - self._pulses_1s = message.pulse_1s.value - if message.pulse_1s.value != 0: - if message.nanosecond_offset.value != 0: - pulses_1s = ( - message.pulse_1s.value - * (1000000000 + message.nanosecond_offset.value) - ) / 1000000000 - else: - pulses_1s = message.pulse_1s.value - self._pulses_1s = pulses_1s + "Create task to update energy logs for node %s", + self._mac_in_str, + ) + self._retrieve_energy_logs_task = create_task( + self.get_missing_energy_logs() + ) else: - self._pulses_1s = 0 - self.do_callback(FEATURE_POWER_USE["id"]) - # Power consumption last 8 seconds - if message.pulse_8s.value == -1: - message.pulse_8s.value = 0 _LOGGER.debug( - "8 sec power pulse counter for node %s has value of -1, corrected to 0", - str(self.mac), - ) - if message.pulse_8s.value != 0: - if message.nanosecond_offset.value != 0: - pulses_8s = ( - message.pulse_8s.value - * (1000000000 + message.nanosecond_offset.value) - ) / 1000000000 - else: - pulses_8s = message.pulse_8s.value - self._pulses_8s = pulses_8s + "Skip creating task to update energy logs for node %s", + self._mac_in_str, + ) + + if ( + self._initialization_delay_expired is not None + and datetime.now(tz=UTC) < self._initialization_delay_expired + ): + _LOGGER.info( + "Unable to return energy statistics for %s during initialization, collecting required data...", + self.name, + ) else: - self._pulses_8s = 0 - self.do_callback(FEATURE_POWER_USE_LAST_8_SEC["id"]) - # Power consumption current hour - if message.pulse_hour_consumed.value == -1: + _LOGGER.warning( + "Unable to return energy statistics for %s, collecting required data...", + self.name, + ) + + return None + + async def get_missing_energy_logs(self) -> None: + """Task to retrieve missing energy logs.""" + + self._energy_counters.update() + if 
self._current_log_address is None: + return None + + if self._energy_counters.log_addresses_missing is None: _LOGGER.debug( - "1 hour consumption power pulse counter for node %s has value of -1, drop value", - str(self.mac), + "Start with initial energy request for the last 10 log addresses for node %s.", + self._mac_in_str, ) - else: - self._update_energy_current_hour(message.pulse_hour_consumed.value) + total_addresses = 11 + log_address = self._current_log_address + log_update_tasks = [] + while total_addresses > 0: + log_update_tasks.append(self.energy_log_update(log_address)) + log_address, _ = calc_log_address(log_address, 1, -4) + total_addresses -= 1 + + await gather(*log_update_tasks) + + if self._cache_enabled: + await self._energy_log_records_save_to_cache() - # Power produced current hour - if message.pulse_hour_produced.value == -1: - message.pulse_hour_produced.value = 0 + return + + if self._energy_counters.log_addresses_missing is not None: + _LOGGER.debug("Task created to get missing logs of %s", self._mac_in_str) + if ( + missing_addresses := self._energy_counters.log_addresses_missing + ) is not None: _LOGGER.debug( - "1 hour power production pulse counter for node %s has value of -1, corrected to 0", - str(self.mac), - ) - if self._pulses_produced_1h != message.pulse_hour_produced.value: - self._pulses_produced_1h = message.pulse_hour_produced.value - self.do_callback(FEATURE_POWER_PRODUCTION_CURRENT_HOUR["id"]) - - def _response_calibration(self, message: CircleCalibrationResponse): - """Store calibration properties""" - for calibration in ("gain_a", "gain_b", "off_noise", "off_tot"): - val = getattr(message, calibration).value - setattr(self, "_" + calibration, val) - self.calibration = True - - def pulses_to_kws(self, pulses, seconds=1): - """Converts the amount of pulses to kWs using the calaboration offsets""" - if pulses is None: - return None - if pulses == 0 or not self.calibration: - return 0.0 - pulses_per_s = pulses / float(seconds) - corrected_pulses = seconds * ( - ( - (((pulses_per_s + self._off_noise) ** 2) * self._gain_b) - + ((pulses_per_s + self._off_noise) * self._gain_a) + "Task Request %s missing energy logs for node %s | %s", + str(len(missing_addresses)), + self._mac_in_str, + str(missing_addresses), ) - + self._off_tot + + missing_addresses = sorted(missing_addresses, reverse=True) + for address in missing_addresses: + await self.energy_log_update(address) + + if self._cache_enabled: + await self._energy_log_records_save_to_cache() + + async def energy_log_update(self, address: int | None) -> bool: + """Request energy log statistics from node. 
Returns true if successful.""" + if address is None: + return False + _LOGGER.debug( + "Request of energy log at address %s for node %s", + str(address), + self.name, ) - calc_value = corrected_pulses / PULSES_PER_KW_SECOND / seconds - # Fix minor miscalculations - if -0.001 < calc_value < 0.001: - calc_value = 0.0 - return calc_value + request = CircleEnergyLogsRequest(self._send, self._mac_in_bytes, address) + if (response := await request.send()) is None: + _LOGGER.debug( + "Retrieving of energy log at address %s for node %s failed", + str(address), + self._mac_in_str, + ) + return False - def _collect_energy_pulses(self, start_utc: datetime, end_utc: datetime): - """Return energy pulses of given hours""" + _LOGGER.debug("EnergyLogs data from %s, address=%s", self._mac_in_str, address) + await self._available_update_state(True, response.timestamp) + energy_record_update = False - if start_utc == end_utc: - hours = 0 - else: - hours = int((end_utc - start_utc).seconds / 3600) - _energy_pulses = 0 - for hour in range(0, hours + 1): - _log_timestamp = start_utc + timedelta(hours=hour) - if self._energy_history.get(_log_timestamp) is not None: - _energy_pulses += self._energy_history[_log_timestamp] - _LOGGER.debug( - "_collect_energy_pulses for %s | %s : %s, total = %s", - str(self.mac), - str(_log_timestamp), - str(self._energy_history[_log_timestamp]), - str(_energy_pulses), - ) - else: - _mem_address = self._energy_timestamp_memory_address(_log_timestamp) - if _mem_address is not None and _mem_address >= 0: - _LOGGER.info( - "_collect_energy_pulses for %s at %s | %s not found", - str(self.mac), - str(_log_timestamp), - str(_mem_address), - ) - if self._energy_history_failed_address.count(_mem_address) == 0: - self._energy_history_failed_address.append(_mem_address) - else: - _LOGGER.info( - "_collect_energy_pulses ignoring negative _mem_address %s", - str(_mem_address), + # Forward historical energy log information to energy counters + # Each response message contains 4 log counters (slots) of the + # energy pulses collected during the previous hour of given timestamp + for _slot in range(4, 0, -1): + log_timestamp, log_pulses = response.log_data[_slot] + _LOGGER.debug( + "In slot=%s: pulses=%s, timestamp=%s", + _slot, + log_pulses, + log_timestamp + ) + if log_timestamp is None or log_pulses is None: + self._energy_counters.add_empty_log(response.log_address, _slot) + elif await self._energy_log_record_update_state( + response.log_address, + _slot, + log_timestamp.replace(tzinfo=UTC), + log_pulses, + import_only=True, + ): + energy_record_update = True + self._energy_counters.update() + if energy_record_update: + await self.save_cache() + return True + + async def _energy_log_records_load_from_cache(self) -> bool: + """Load energy_log_record from cache.""" + cache_data = self._get_cache(CACHE_ENERGY_COLLECTION) + if (cache_data := self._get_cache(CACHE_ENERGY_COLLECTION)) is None: + _LOGGER.warning( + "Failed to restore energy log records from cache for node %s", self.name + ) + return False + restored_logs: dict[int, list[int]] = {} + log_data = cache_data.split("|") + for log_record in log_data: + log_fields = log_record.split(":") + if len(log_fields) == 4: + timestamp_energy_log = log_fields[2].split("-") + if len(timestamp_energy_log) == 6: + address = int(log_fields[0]) + slot = int(log_fields[1]) + self._energy_counters.add_pulse_log( + address=address, + slot=slot, + timestamp=datetime( + year=int(timestamp_energy_log[0]), + month=int(timestamp_energy_log[1]), + 
day=int(timestamp_energy_log[2]), + hour=int(timestamp_energy_log[3]), + minute=int(timestamp_energy_log[4]), + second=int(timestamp_energy_log[5]), + tzinfo=UTC, + ), + pulses=int(log_fields[3]), + import_only=True, ) + if restored_logs.get(address) is None: + restored_logs[address] = [] + restored_logs[address].append(slot) - # Validate all history values where present - if len(self._energy_history_failed_address) == 0: - return _energy_pulses - return None + self._energy_counters.update() - def _update_energy_current_hour(self, _pulses_cur_hour): - """Update energy consumption (pulses) of current hour""" - _LOGGER.info( - "_update_energy_current_hour for %s | counter = %s, update= %s", - str(self.mac), - str(self._energy_pulses_current_hour), - str(_pulses_cur_hour), - ) - if self._energy_pulses_current_hour is None: - self._energy_pulses_current_hour = _pulses_cur_hour - self.do_callback(FEATURE_POWER_CONSUMPTION_CURRENT_HOUR["id"]) - else: - if self._energy_pulses_current_hour != _pulses_cur_hour: - self._energy_pulses_current_hour = _pulses_cur_hour - self.do_callback(FEATURE_POWER_CONSUMPTION_CURRENT_HOUR["id"]) + # Create task to retrieve remaining (missing) logs + if self._energy_counters.log_addresses_missing is None: + return False - if self._last_collected_address_timestamp > datetime(2000, 1, 1): - # Update today after lastlog has been retrieved - self._update_energy_today_now() + if len(self._energy_counters.log_addresses_missing) > 0: + if self._retrieve_energy_logs_task is not None: + if not self._retrieve_energy_logs_task.done(): + await self._retrieve_energy_logs_task - def _update_energy_today_now(self): - """Update energy consumption (pulses) of today up to now""" + self._retrieve_energy_logs_task = create_task( + self.get_missing_energy_logs() + ) + return False - _pulses_today_now = None + return True - # Regular update - if ( - self._energy_pulses_today_hourly is not None - and self._energy_pulses_current_hour is not None - ): - _pulses_today_now = ( - self._energy_pulses_today_hourly + self._energy_pulses_current_hour - ) + async def _energy_log_records_save_to_cache(self) -> None: + """Save currently collected energy logs to cached file.""" + if not self._cache_enabled: + return + logs: dict[int, dict[int, PulseLogRecord]] = ( + self._energy_counters.get_pulse_logs() + ) + cached_logs = "" + for address in sorted(logs.keys(), reverse=True): + for slot in sorted(logs[address].keys(), reverse=True): + log = logs[address][slot] + if cached_logs != "": + cached_logs += "|" + cached_logs += f"{address}:{slot}:{log.timestamp.year}" + cached_logs += f"-{log.timestamp.month}-{log.timestamp.day}" + cached_logs += f"-{log.timestamp.hour}-{log.timestamp.minute}" + cached_logs += f"-{log.timestamp.second}:{log.pulses}" + self._set_cache(CACHE_ENERGY_COLLECTION, cached_logs) - _utc_hour_timestamp = datetime.utcnow().replace( - minute=0, second=0, microsecond=0 + async def _energy_log_record_update_state( + self, + address: int, + slot: int, + timestamp: datetime, + pulses: int, + import_only: bool = False, + ) -> bool: + """Process new energy log record. 
Returns true if record is new or changed.""" + self._energy_counters.add_pulse_log( + address, slot, timestamp, pulses, import_only=import_only ) - _local_hour = datetime.now().hour - _utc_midnight_timestamp = _utc_hour_timestamp - timedelta(hours=_local_hour) - _local_midnight_timestamp = datetime.now().replace( - hour=0, minute=0, second=0, microsecond=0 + if not self._cache_enabled: + return False + log_cache_record = f"{address}:{slot}:{timestamp.year}" + log_cache_record += f"-{timestamp.month}-{timestamp.day}" + log_cache_record += f"-{timestamp.hour}-{timestamp.minute}" + log_cache_record += f"-{timestamp.second}:{pulses}" + if (cached_logs := self._get_cache(CACHE_ENERGY_COLLECTION)) is not None: + if log_cache_record not in cached_logs: + _LOGGER.debug( + "Add logrecord (%s, %s) to log cache of %s", + str(address), + str(slot), + self._mac_in_str, + ) + self._set_cache( + CACHE_ENERGY_COLLECTION, cached_logs + "|" + log_cache_record + ) + return True + return False + _LOGGER.debug( + "No existing energy collection log cached for %s", self._mac_in_str ) + self._set_cache(CACHE_ENERGY_COLLECTION, log_cache_record) + return True - if _pulses_today_now is None: - if self._energy_pulses_today_hourly is None: - self._update_energy_today_hourly( - _utc_midnight_timestamp + timedelta(hours=1), - _utc_hour_timestamp, - ) - elif ( - self._energy_pulses_today_now is not None - and self._energy_pulses_today_now > _pulses_today_now - and self._energy_pulses_midnight_rollover < _local_midnight_timestamp - ): - _LOGGER.info( - "_update_energy_today_now for %s midnight rollover started old=%s, new=%s", - str(self.mac), - str(self._energy_pulses_today_now), - str(_pulses_today_now), - ) - self._energy_pulses_today_now = 0 - self._energy_pulses_midnight_rollover = _local_midnight_timestamp - self._update_energy_today_hourly( - _utc_midnight_timestamp + timedelta(hours=1), - _utc_hour_timestamp, - ) - self.do_callback(FEATURE_ENERGY_CONSUMPTION_TODAY["id"]) - elif ( - self._energy_pulses_today_now is not None - and self._energy_pulses_today_now > _pulses_today_now - and int( - (self._energy_pulses_today_now - _pulses_today_now) - / (self._energy_pulses_today_now + 1) - * 100 - ) - > 1 - ): - _LOGGER.info( - "_update_energy_today_now for %s hour rollover started old=%s, new=%s", - str(self.mac), - str(self._energy_pulses_today_now), - str(_pulses_today_now), + @raise_not_loaded + async def set_relay(self, state: bool) -> bool: + """Change the state of the relay.""" + if NodeFeature.RELAY not in self._features: + raise FeatureError( + f"Changing state of relay is not supported for node {self.mac}" ) - self._update_energy_today_hourly( - _utc_midnight_timestamp + timedelta(hours=1), - _utc_hour_timestamp, - ) - else: - _LOGGER.info( - "_update_energy_today_now for %s | counter = %s, update= %s (%s + %s)", - str(self.mac), - str(self._energy_pulses_today_now), - str(_pulses_today_now), - str(self._energy_pulses_today_hourly), - str(self._energy_pulses_current_hour), - ) - if self._energy_pulses_today_now is None: - self._energy_pulses_today_now = _pulses_today_now - if self._energy_pulses_today_now is not None: - self.do_callback(FEATURE_ENERGY_CONSUMPTION_TODAY["id"]) - else: - if self._energy_pulses_today_now != _pulses_today_now: - self._energy_pulses_today_now = _pulses_today_now - self.do_callback(FEATURE_ENERGY_CONSUMPTION_TODAY["id"]) + _LOGGER.debug("set_relay() start") + request = CircleRelaySwitchRequest(self._send, self._mac_in_bytes, state) + response = await request.send() - def 
_update_energy_previous_hour(self, prev_hour: datetime): - """Update energy consumption (pulses) of previous hour""" - _pulses_prev_hour = self._collect_energy_pulses(prev_hour, prev_hour) - _LOGGER.info( - "_update_energy_previous_hour for %s | counter = %s, update= %s, timestamp %s", - str(self.mac), - str(self._energy_pulses_yesterday), - str(_pulses_prev_hour), - str(prev_hour), + if response is None or response.ack_id == NodeResponseType.RELAY_SWITCH_FAILED: + raise NodeError(f"Request to switch relay for {self.name} failed") + + if response.ack_id == NodeResponseType.RELAY_SWITCHED_OFF: + await self._relay_update_state(state=False, timestamp=response.timestamp) + return False + if response.ack_id == NodeResponseType.RELAY_SWITCHED_ON: + await self._relay_update_state(state=True, timestamp=response.timestamp) + return True + + raise NodeError( + f"Unexpected NodeResponseType {response.ack_id!r} received " + + "in response to CircleRelaySwitchRequest for node {self.mac}" ) - if self._energy_pulses_prev_hour is None: - self._energy_pulses_prev_hour = _pulses_prev_hour - if self._energy_pulses_prev_hour is not None: - self.do_callback(FEATURE_POWER_CONSUMPTION_PREVIOUS_HOUR["id"]) - else: - if self._energy_pulses_prev_hour != _pulses_prev_hour: - self._energy_pulses_prev_hour = _pulses_prev_hour - self.do_callback(FEATURE_POWER_CONSUMPTION_PREVIOUS_HOUR["id"]) - def _update_energy_yesterday( - self, start_yesterday: datetime, end_yesterday: datetime - ): - """Update energy consumption (pulses) of yesterday""" - _pulses_yesterday = self._collect_energy_pulses(start_yesterday, end_yesterday) + async def _relay_load_from_cache(self) -> bool: + """Load relay state from cache.""" + if (cached_relay_data := self._get_cache(CACHE_RELAY)) is not None: + _LOGGER.debug("Restore relay state cache for node %s", self._mac_in_str) + relay_state = False + if cached_relay_data == "True": + relay_state = True + await self._relay_update_state(relay_state) + return True _LOGGER.debug( - "_update_energy_yesterday for %s | counter = %s, update= %s, range %s to %s", - str(self.mac), - str(self._energy_pulses_yesterday), - str(_pulses_yesterday), - str(start_yesterday), - str(end_yesterday), + "Failed to restore relay state from cache for node %s, try to request node info...", + self._mac_in_str, ) - if self._energy_pulses_yesterday is None: - self._energy_pulses_yesterday = _pulses_yesterday - if self._energy_pulses_yesterday is not None: - self.do_callback(FEATURE_POWER_CONSUMPTION_YESTERDAY["id"]) - else: - if self._energy_pulses_yesterday != _pulses_yesterday: - self._energy_pulses_yesterday = _pulses_yesterday - self.do_callback(FEATURE_POWER_CONSUMPTION_YESTERDAY["id"]) - - def _update_energy_today_hourly(self, start_today: datetime, end_today: datetime): - """Update energy consumption (pulses) of today up to last hour""" - if start_today > end_today: - _pulses_today_hourly = 0 - else: - _pulses_today_hourly = self._collect_energy_pulses(start_today, end_today) + if await self.node_info_update() is None: + return False + return True + + async def _relay_update_state( + self, state: bool, timestamp: datetime | None = None + ) -> None: + """Process relay state update.""" + state_update = False + if state: + self._set_cache(CACHE_RELAY, "True") + if self._relay_state.state is None or not self._relay_state.state: + state_update = True + if not state: + self._set_cache(CACHE_RELAY, "False") + if self._relay_state.state is None or self._relay_state.state: + state_update = True + self._relay_state = 
replace(self._relay_state, state=state, timestamp=timestamp)
+        if state_update:
+            await self.publish_feature_update_to_subscribers(
+                NodeFeature.RELAY, self._relay_state
+            )
+            await self.save_cache()
+
+    async def clock_synchronize(self) -> bool:
+        """Synchronize clock. Returns true if successful."""
+        get_clock_request = CircleClockGetRequest(self._send, self._mac_in_bytes)
+        clock_response = await get_clock_request.send()
+        if clock_response is None or clock_response.timestamp is None:
+            return False
+        _dt_of_circle = datetime.now(tz=UTC).replace(
+            hour=clock_response.time.hour.value,
+            minute=clock_response.time.minute.value,
+            second=clock_response.time.second.value,
+            microsecond=0,
+            tzinfo=UTC,
+        )
+        clock_offset = clock_response.timestamp.replace(microsecond=0) - _dt_of_circle
+        if abs(clock_offset.total_seconds()) < MAX_TIME_DRIFT:
+            return True
         _LOGGER.info(
-            "_update_energy_today_hourly for %s | counter = %s, update= %s, range %s to %s",
-            str(self.mac),
-            str(self._energy_pulses_today_hourly),
-            str(_pulses_today_hourly),
-            str(start_today),
-            str(end_today),
+            "Reset clock of node %s because time has drifted %s sec",
+            self._mac_in_str,
+            str(clock_offset.total_seconds()),
         )
-        if self._energy_pulses_today_hourly is None:
-            self._energy_pulses_today_hourly = _pulses_today_hourly
-            if self._energy_pulses_today_hourly is not None:
-                self.do_callback(FEATURE_POWER_CONSUMPTION_TODAY["id"])
-        else:
-            if self._energy_pulses_today_hourly != _pulses_today_hourly:
-                self._energy_pulses_today_hourly = _pulses_today_hourly
-                self.do_callback(FEATURE_POWER_CONSUMPTION_TODAY["id"])
-
-    def request_energy_counters(self, log_address=None, callback=None):
-        """Request power log of specified address"""
-        _LOGGER.debug(
-            "request_energy_counters for %s of address %s",
-            str(self.mac),
-            str(log_address),
+        if self._node_protocols is None:
+            raise NodeError(
+                "Unable to synchronize clock when protocol version is unknown"
+            )
+        set_clock_request = CircleClockSetRequest(
+            self._send,
+            self._mac_in_bytes,
+            datetime.now(tz=UTC),
+            self._node_protocols.max,
         )
-        if not self._available:
+        node_response: NodeResponse | None = await set_clock_request.send()
+        if node_response is None:
+            _LOGGER.warning(
+                "Failed to (re)set the internal clock of %s",
+                self.name,
+            )
+            return False
+        if node_response.ack_id == NodeResponseType.CLOCK_ACCEPTED:
+            return True
+        return False
+
+    async def load(self) -> bool:
+        """Load and activate Circle node features."""
+        if self._loaded:
+            return True
+        if self._cache_enabled:
+            _LOGGER.debug("Load Circle node %s from cache", self._mac_in_str)
+            if await self._load_from_cache():
+                self._loaded = True
+                self._setup_protocol(
+                    CIRCLE_FIRMWARE_SUPPORT,
+                    (
+                        NodeFeature.RELAY,
+                        NodeFeature.RELAY_INIT,
+                        NodeFeature.ENERGY,
+                        NodeFeature.POWER,
+                    ),
+                )
+                if await self.initialize():
+                    await self._loaded_callback(NodeEvent.LOADED, self.mac)
+                    return True
             _LOGGER.debug(
-                "Skip request_energy_counters for % is unavailable",
-                str(self.mac),
+                "Load Circle node %s from cache failed",
+                self._mac_in_str,
             )
-            return
-        if log_address is None:
-            log_address = self._last_log_address
-        if log_address is not None:
-            # Energy history already collected
-            if (
-                log_address == self._last_log_address
-                and self._energy_last_populated_slot == 4
-            ):
-                # Rollover of energy counter slot, get new memory address first
-                self._energy_last_populated_slot = 0
-                self._request_info(self.request_energy_counters)
-            else:
- # Request new energy counters - if self._energy_memory.get(log_address, 0) < 4: - self.message_sender( - CircleEnergyCountersRequest(self._mac, log_address), - None, - 0, - PRIORITY_LOW, - ) - else: - _LOGGER.info( - "Drop known request_energy_counters for %s of address %s", - str(self.mac), - str(log_address), - ) else: - self._request_info(self.request_energy_counters) + _LOGGER.debug("Load Circle node %s", self._mac_in_str) - def _response_energy_counters(self, message: CircleEnergyCountersResponse): - """Save historical energy information in local counters - Each response message contains 4 log counters (slots) - of the energy pulses collected during the previous hour of given timestamp - """ - if message.logaddr.value == (self._last_log_address): - self._energy_last_populated_slot = 0 + # Check if node is online + if not self._available and not await self.is_online(): + _LOGGER.debug( + "Failed to load Circle node %s because it is not online", + self._mac_in_str, + ) + return False - # Collect energy history pulses from received log address - # Store pulse in self._energy_history using the timestamp in UTC as index - _utc_hour_timestamp = datetime.utcnow().replace( - minute=0, second=0, microsecond=0 - ) - _local_midnight_timestamp = datetime.now().replace( - hour=0, minute=0, second=0, microsecond=0 + # Get node info + if ( + self.skip_update(self._node_info, 30) + and await self.node_info_update() is None + ): + _LOGGER.debug( + "Failed to load Circle node %s because it is not responding to information request", + self._mac_in_str, + ) + return False + self._loaded = True + self._setup_protocol( + CIRCLE_FIRMWARE_SUPPORT, + ( + NodeFeature.RELAY, + NodeFeature.RELAY_INIT, + NodeFeature.ENERGY, + NodeFeature.POWER, + ), ) - _local_hour = datetime.now().hour - _utc_midnight_timestamp = _utc_hour_timestamp - timedelta(hours=_local_hour) - _midnight_rollover = False - _history_rollover = False - for _slot in range(1, 5): - if ( - _log_timestamp := getattr(message, "logdate%d" % (_slot,)).value - ) is None: - break - # Register collected history memory - if _slot > self._energy_memory.get(message.logaddr.value, 0): - self._energy_memory[message.logaddr.value] = _slot + if not await self.initialize(): + return False + await self._loaded_callback(NodeEvent.LOADED, self.mac) + return True - self._energy_history[_log_timestamp] = getattr( - message, "pulses%d" % (_slot,) - ).value + async def _load_from_cache(self) -> bool: + """Load states from previous cached information. 
Returns True if successful."""
+        if not await super()._load_from_cache():
+            return False

-            _LOGGER.info(
-                "push _energy_memory for %s address %s slot %s stamp %s",
-                str(self.mac),
-                str(message.logaddr.value),
-                str(_slot),
-                str(_log_timestamp),
+        # Calibration settings
+        if not await self._calibration_load_from_cache():
+            _LOGGER.debug(
+                "Node %s failed to load calibration from cache", self._mac_in_str
             )
-
-            # Store last populated _slot
-            if message.logaddr.value == (self._last_log_address):
-                self._energy_last_populated_slot = _slot
-
-            # Store most recent timestamp of collected pulses
-            self._energy_last_collected_timestamp = max(
-                self._energy_last_collected_timestamp, _log_timestamp
+            return False
+        # Energy collection
+        if not await self._energy_log_records_load_from_cache():
+            _LOGGER.warning(
+                "Node %s failed to load energy_log_records from cache",
+                self._mac_in_str,
+            )
+        # Relay
+        if not await self._relay_load_from_cache():
+            _LOGGER.debug(
+                "Node %s failed to load relay state from cache",
+                self._mac_in_str,
             )
+        # Relay init config if feature is enabled
+        if NodeFeature.RELAY_INIT in self._features:
+            if not await self._relay_init_load_from_cache():
+                _LOGGER.debug(
+                    "Node %s failed to load relay_init state from cache",
+                    self._mac_in_str,
+                )
+        return True

-            # Keep track of the most recent timestamp, _last_log_address might be corrupted
-            if _log_timestamp > self._last_collected_address_timestamp:
-                self._last_collected_address = message.logaddr.value
-                self._last_collected_address_slot = _slot
-                self._last_collected_address_timestamp = _log_timestamp
+    @raise_not_loaded
+    async def initialize(self) -> bool:
+        """Initialize node."""
+        if self._initialized:
+            _LOGGER.debug("Already initialized node %s", self._mac_in_str)
+            return True

-            # Trigger history rollover
-            _LOGGER.info(
-                "history_rollover %s %s %s",
-                str(_log_timestamp),
-                str(_utc_hour_timestamp),
-                str(self._energy_last_rollover_timestamp),
+        if not await self.clock_synchronize():
+            _LOGGER.debug(
+                "Failed to initialize node %s, failed clock sync", self._mac_in_str
             )
-            if (
-                _log_timestamp == _utc_hour_timestamp
-                and self._energy_last_rollover_timestamp < _utc_hour_timestamp
-            ):
-                self._energy_last_rollover_timestamp = _utc_hour_timestamp
-                _history_rollover = True
-                _LOGGER.info(
-                    "_response_energy_counters for %s | history rollover, reset date to %s",
-                    str(self.mac),
-                    str(_utc_hour_timestamp),
+            self._initialized = False
+            return False
+        if not self._calibration and not await self.calibration_update():
+            _LOGGER.debug(
+                "Failed to initialize node %s, no calibration", self._mac_in_str
+            )
+            self._initialized = False
+            return False
+        if (
+            self.skip_update(self._node_info, 30)
+            and await self.node_info_update() is None
+        ):
+            _LOGGER.debug("Failed to retrieve node info for %s", self._mac_in_str)
+        if NodeFeature.RELAY_INIT in self._features:
+            if (state := await self._relay_init_get()) is not None:
+                self._relay_config = replace(self._relay_config, init_state=state)
+            else:
+                _LOGGER.debug(
+                    "Failed to initialize node %s, relay init", self._mac_in_str
                 )
+                self._initialized = False
+                return False
+        return await super().initialize()

-            # Trigger midnight rollover
-            if (
-                _log_timestamp == _utc_midnight_timestamp
-                and self._energy_consumption_today_reset < _local_midnight_timestamp
-            ):
-                _LOGGER.info(
-                    "_response_energy_counters for %s | midnight rollover, reset date to %s",
-                    str(self.mac),
-                    str(_local_midnight_timestamp),
-                )
-                self._energy_consumption_today_reset = _local_midnight_timestamp
-
_midnight_rollover = True - if self._energy_last_collected_timestamp == datetime.utcnow().replace( - minute=0, second=0, microsecond=0 + async def node_info_update( + self, node_info: NodeInfoResponse | None = None + ) -> NodeInfo | None: + """Update Node (hardware) information.""" + if node_info is None: + if self.skip_update(self._node_info, 30): + return self._node_info + node_request = NodeInfoRequest(self._send, self._mac_in_bytes) + node_info = await node_request.send() + if node_info is None: + return None + await super().node_info_update(node_info) + await self._relay_update_state( + node_info.relay_state, timestamp=node_info.timestamp + ) + if self._current_log_address is not None and ( + self._current_log_address > node_info.current_logaddress_pointer + or self._current_log_address == 1 ): - self._update_energy_previous_hour(_utc_hour_timestamp) - self._update_energy_today_hourly( - _utc_midnight_timestamp + timedelta(hours=1), - _utc_hour_timestamp, + # Rollover of log address + _LOGGER.debug( + "Rollover log address from %s into %s for node %s", + self._current_log_address, + node_info.current_logaddress_pointer, + self._mac_in_str, ) - self._update_energy_yesterday( - _utc_midnight_timestamp - timedelta(hours=23), - _utc_midnight_timestamp, + if self._current_log_address != node_info.current_logaddress_pointer: + self._current_log_address = node_info.current_logaddress_pointer + self._set_cache( + CACHE_CURRENT_LOG_ADDRESS, node_info.current_logaddress_pointer ) - else: - _LOGGER.info( - "CircleEnergyCounter failed for %s at %s|%s count %s", - str(self.mac), - str(message.logaddr.value), - str(self._last_log_address), - str(self._energy_last_collected_count), - ) - self._energy_last_collected_count += 1 + await self.save_cache() + return self._node_info - if ( - message.logaddr.value == self._last_log_address - and self._energy_last_collected_count > 3 - ): - if ( - self._energy_history_failed_address.count( - self._last_log_address - 1 - ) - == 0 - ): - self._energy_history_failed_address.append( - self._last_log_address - 1 - ) - _LOGGER.info("Resetting CircleEnergyCounter due to logaddress offset") - - # Cleanup energy history for more than 48 hours - _48_hours_ago = datetime.utcnow().replace( - minute=0, second=0, microsecond=0 - ) - timedelta(hours=48) - for log_timestamp in list(self._energy_history.keys()): - if log_timestamp < _48_hours_ago: - del self._energy_history[log_timestamp] - - def _response_clock(self, message: CircleClockResponse): - log_date = datetime( - datetime.now().year, - datetime.now().month, - datetime.now().day, - message.time.value.hour, - message.time.value.minute, - message.time.value.second, - ) - clock_offset = message.timestamp.replace(microsecond=0) - ( - log_date + self.timezone_delta - ) - if clock_offset.days == -1: - self._clock_offset = clock_offset.seconds - 86400 - else: - self._clock_offset = clock_offset.seconds - _LOGGER.debug( - "Clock of node %s has drifted %s sec", - str(self.mac), - str(self._clock_offset), + async def _node_info_load_from_cache(self) -> bool: + """Load node info settings from cache.""" + result = await super()._node_info_load_from_cache() + if ( + current_log_address := self._get_cache(CACHE_CURRENT_LOG_ADDRESS) + ) is not None: + self._current_log_address = int(current_log_address) + return result + return False + + # pylint: disable=too-many-arguments + async def update_node_details( + self, + firmware: datetime | None, + hardware: str | None, + node_type: NodeType | None, + timestamp: datetime | None, + 
relay_state: bool | None, + logaddress_pointer: int | None, + ) -> bool: + """Process new node info and return true if all fields are updated.""" + if relay_state is not None: + self._relay_state = replace( + self._relay_state, state=relay_state, timestamp=timestamp + ) + if logaddress_pointer is not None: + self._current_log_address = logaddress_pointer + return await super().update_node_details( + firmware, + hardware, + node_type, + timestamp, + relay_state, + logaddress_pointer, ) - def get_clock(self, callback=None): - """Get current datetime of internal clock of Circle.""" - self.message_sender( - CircleClockGetRequest(self._mac), - callback, - 0, - PRIORITY_LOW, + async def unload(self) -> None: + """Deactivate and unload node features.""" + self._loaded = False + if ( + self._retrieve_energy_logs_task is not None + and not self._retrieve_energy_logs_task.done() + ): + self._retrieve_energy_logs_task.cancel() + await self._retrieve_energy_logs_task + if self._cache_enabled: + await self._energy_log_records_save_to_cache() + await super().unload() + + @raise_not_loaded + async def set_relay_init(self, state: bool) -> bool: + """Change the initial power-on state of the relay.""" + if NodeFeature.RELAY_INIT not in self._features: + raise FeatureError( + f"Configuration of initial power-up relay state is not supported for node {self.mac}" + ) + await self._relay_init_set(state) + if self._relay_config.init_state is None: + raise NodeError("Failed to configure relay init setting") + return self._relay_config.init_state + + async def _relay_init_get(self) -> bool | None: + """Get current configuration of the power-up state of the relay. Returns None if retrieval failed.""" + if NodeFeature.RELAY_INIT not in self._features: + raise NodeError( + "Retrieval of initial state of relay is not " + + f"supported for device {self.name}" + ) + request = CircleRelayInitStateRequest( + self._send, self._mac_in_bytes, False, False ) + if (response := await request.send()) is not None: + await self._relay_init_update_state(response.relay.value == 1) + return self._relay_config.init_state + return None - def set_clock(self, callback=None): - """Set internal clock of Circle.""" - self.message_sender( - CircleClockSetRequest(self._mac, datetime.utcnow()), - callback, + async def _relay_init_set(self, state: bool) -> bool | None: + """Configure relay init state.""" + if NodeFeature.RELAY_INIT not in self._features: + raise NodeError( + "Configuring of initial state of relay is not" + + f"supported for device {self.name}" + ) + request = CircleRelayInitStateRequest( + self._send, self._mac_in_bytes, True, state ) + if (response := await request.send()) is not None: + await self._relay_init_update_state(response.relay.value == 1) + return self._relay_config.init_state + return None - def sync_clock(self, max_drift=0): - """Resync clock of node if time has drifted more than MAX_TIME_DRIFT""" - if self._clock_offset is not None: - if max_drift == 0: - max_drift = MAX_TIME_DRIFT - if (self._clock_offset > max_drift) or (self._clock_offset < -(max_drift)): - _LOGGER.info( - "Reset clock of node %s because time has drifted %s sec", - str(self.mac), - str(self._clock_offset), - ) - self.set_clock() + async def _relay_init_load_from_cache(self) -> bool: + """Load relay init state from cache. 
Returns True if retrieval was successful.""" + if (cached_relay_data := self._get_cache(CACHE_RELAY_INIT)) is not None: + relay_init_state = False + if cached_relay_data == "True": + relay_init_state = True + await self._relay_init_update_state(relay_init_state) + return True + return False - def _energy_timestamp_memory_address(self, utc_timestamp: datetime): - """Return memory address for given energy counter timestamp""" - if self._last_collected_address is None: - return None - # Should already be hour timestamp, but just to be sure. - _utc_now_timestamp = self._last_collected_address_timestamp.replace( - minute=0, second=0, microsecond=0 - ) - if utc_timestamp > _utc_now_timestamp: + async def _relay_init_update_state(self, state: bool) -> None: + """Process relay init state update.""" + state_update = False + if state: + self._set_cache(CACHE_RELAY_INIT, "True") + if ( + self._relay_config.init_state is None + or not self._relay_config.init_state + ): + state_update = True + if not state: + self._set_cache(CACHE_RELAY_INIT, "False") + if self._relay_config.init_state is None or self._relay_config.init_state: + state_update = True + if state_update: + self._relay_config = replace(self._relay_config, init_state=state) + await self.publish_feature_update_to_subscribers( + NodeFeature.RELAY_INIT, self._relay_config + ) + await self.save_cache() + + @raise_calibration_missing + def _calc_watts(self, pulses: int, seconds: int, nano_offset: int) -> float | None: + """Calculate watts based on energy usages.""" + if self._calibration is None: return None - _seconds_offset = (_utc_now_timestamp - utc_timestamp).total_seconds() - _hours_offset = _seconds_offset / 3600 - - if (_slot := self._last_collected_address_slot) == 0: - _slot = 4 - _address = self._last_collected_address - _sslot = _slot - - # last known - _hours = 1 - while _hours <= _hours_offset: - _slot -= 1 - if _slot == 0: - _address -= 1 - _slot = 4 - _hours += 1 - _LOGGER.info( - "Calculated address %s at %s from %s at %s with %s|%s", - _address, - utc_timestamp, - self._last_log_address, - _utc_now_timestamp, - _sslot, - _hours_offset, + pulses_per_s = self._correct_power_pulses(pulses, nano_offset) / float(seconds) + negative = False + if pulses_per_s < 0: + negative = True + pulses_per_s = abs(pulses_per_s) + + corrected_pulses = seconds * ( + ( + ( + ((pulses_per_s + self._calibration.off_noise) ** 2) + * self._calibration.gain_b + ) + + ( + (pulses_per_s + self._calibration.off_noise) + * self._calibration.gain_a + ) + ) + + self._calibration.off_tot ) - return _address + if negative: + corrected_pulses = -corrected_pulses + + return corrected_pulses / PULSES_PER_KW_SECOND / seconds * (1000) + + def _correct_power_pulses(self, pulses: int, offset: int) -> float: + """Correct pulses based on given measurement time offset (ns).""" + + # Sometimes the circle returns -1 for some of the pulse counters + # likely this means the circle measures very little power and is + # suffering from rounding errors. Zero these out. However, negative + # pulse values are valid for power producing appliances, like + # solar panels, so don't complain too loudly. 
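+        # Worked example with illustrative numbers (assuming the constant
+        # SECOND_IN_NANOSECONDS equals 1_000_000_000): pulses=100 with an
+        # offset of 2_000_000 ns is corrected below to
+        # 100 * (1_000_000_000 + 2_000_000) / 1_000_000_000 = 100.2 pulses.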
+ if pulses == -1: + _LOGGER.debug( + "Power pulse counter for node %s of value of -1, corrected to 0", + self._mac_in_str, + ) + return 0.0 + if pulses != 0: + if offset != 0: + return ( + pulses * (SECOND_IN_NANOSECONDS + offset) + ) / SECOND_IN_NANOSECONDS + return pulses + return 0.0 + + @raise_not_loaded + async def get_state(self, features: tuple[NodeFeature]) -> dict[NodeFeature, Any]: + """Update latest state for given feature.""" + states: dict[NodeFeature, Any] = {} + if not self._available: + if not await self.is_online(): + _LOGGER.debug( + "Node %s did not respond, unable to update state", self._mac_in_str + ) + for feature in features: + states[feature] = None + states[NodeFeature.AVAILABLE] = self.available_state + return states + + for feature in features: + if feature not in self._features: + raise NodeError( + f"Update of feature '{feature}' is not supported for {self.name}" + ) + if feature == NodeFeature.ENERGY: + states[feature] = await self.energy_update() + _LOGGER.debug( + "async_get_state %s - energy: %s", + self._mac_in_str, + states[feature], + ) + elif feature == NodeFeature.RELAY: + states[feature] = self._relay_state + _LOGGER.debug( + "async_get_state %s - relay: %s", + self._mac_in_str, + states[feature], + ) + elif feature == NodeFeature.RELAY_INIT: + states[feature] = self._relay_config + elif feature == NodeFeature.POWER: + states[feature] = await self.power_update() + _LOGGER.debug( + "async_get_state %s - power: %s", + self._mac_in_str, + states[feature], + ) + else: + state_result = await super().get_state((feature,)) + states[feature] = state_result[feature] + if NodeFeature.AVAILABLE not in states: + states[NodeFeature.AVAILABLE] = self.available_state + return states diff --git a/plugwise_usb/nodes/circle_plus.py b/plugwise_usb/nodes/circle_plus.py index 39b7b5361..fcda89dee 100644 --- a/plugwise_usb/nodes/circle_plus.py +++ b/plugwise_usb/nodes/circle_plus.py @@ -1,137 +1,121 @@ -"""Plugwise Circle+ node object.""" -from datetime import datetime +"""Plugwise Circle+ node.""" + +from __future__ import annotations + +from datetime import UTC, datetime import logging -from ..constants import MAX_TIME_DRIFT, PRIORITY_LOW, UTF8_DECODE +from ..api import NodeEvent, NodeFeature +from ..constants import MAX_TIME_DRIFT from ..messages.requests import ( CirclePlusRealTimeClockGetRequest, CirclePlusRealTimeClockSetRequest, - CirclePlusScanRequest, ) -from ..messages.responses import CirclePlusRealTimeClockResponse, CirclePlusScanResponse -from ..nodes.circle import PlugwiseCircle +from ..messages.responses import NodeResponseType +from .circle import PlugwiseCircle +from .helpers.firmware import CIRCLE_PLUS_FIRMWARE_SUPPORT _LOGGER = logging.getLogger(__name__) class PlugwiseCirclePlus(PlugwiseCircle): - """provides interface to the Plugwise Circle+ nodes""" + """Plugwise Circle+ node.""" - def __init__(self, mac, address, message_sender): - super().__init__(mac, address, message_sender) - self._plugwise_nodes = {} - self._scan_response = {} - self._scan_for_nodes_callback = None - self._realtime_clock_offset = None - self.get_real_time_clock(self.sync_realtime_clock) - - def message_for_circle_plus(self, message): - """Process received message""" - if isinstance(message, CirclePlusRealTimeClockResponse): - self._response_realtime_clock(message) - elif isinstance(message, CirclePlusScanResponse): - self._process_scan_response(message) + async def load(self) -> bool: + """Load and activate Circle+ node features.""" + if self._loaded: + return True + if 
self._cache_enabled: + _LOGGER.debug("Load Circle node %s from cache", self._node_info.mac) + if await self._load_from_cache(): + self._loaded = True + self._setup_protocol( + CIRCLE_PLUS_FIRMWARE_SUPPORT, + ( + NodeFeature.RELAY, + NodeFeature.RELAY_INIT, + NodeFeature.ENERGY, + NodeFeature.POWER, + ), + ) + if await self.initialize(): + await self._loaded_callback(NodeEvent.LOADED, self.mac) + return True + _LOGGER.info( + "Load Circle+ node %s from cache failed", + self._node_info.mac, + ) else: - _LOGGER.waning( - "Unsupported message type '%s' received from circle with mac %s", - str(message.__class__.__name__), - self.mac, + _LOGGER.debug("Load Circle+ node %s", self._node_info.mac) + + # Check if node is online + if not self._available and not await self.is_online(): + _LOGGER.warning( + "Failed to load Circle+ node %s because it is not online", + self._node_info.mac, ) + return False - def scan_for_nodes(self, callback=None): - """Scan for registered nodes.""" - self._scan_for_nodes_callback = callback - for node_address in range(0, 64): - self.message_sender(CirclePlusScanRequest(self._mac, node_address)) - self._scan_response[node_address] = False + # Get node info + if await self.node_info_update() is None: + _LOGGER.warning( + "Failed to load Circle+ node %s because it is not responding to information request", + self._node_info.mac, + ) + return False + self._loaded = True + self._setup_protocol( + CIRCLE_PLUS_FIRMWARE_SUPPORT, + ( + NodeFeature.RELAY, + NodeFeature.RELAY_INIT, + NodeFeature.ENERGY, + NodeFeature.POWER, + ), + ) + if not await self.initialize(): + return False + await self._loaded_callback(NodeEvent.LOADED, self.mac) + return True - def _process_scan_response(self, message): - """Process scan response message.""" - _LOGGER.debug( - "Process scan response for address %s", message.node_address.value + async def clock_synchronize(self) -> bool: + """Synchronize realtime clock. Returns true if successful.""" + clock_request = CirclePlusRealTimeClockGetRequest( + self._send, self._mac_in_bytes ) - if message.node_mac.value != b"FFFFFFFFFFFFFFFF": + if (clock_response := await clock_request.send()) is None: _LOGGER.debug( - "Linked plugwise node with mac %s found", - message.node_mac.value.decode(UTF8_DECODE), + "No response for async_realtime_clock_synchronize() for %s", self.mac ) - # TODO: 20220206 is there 'mac' in the dict? Otherwise it can be rewritten to just if message... 
in - if not self._plugwise_nodes.get(message.node_mac.value.decode(UTF8_DECODE)): - self._plugwise_nodes[ - message.node_mac.value.decode(UTF8_DECODE) - ] = message.node_address.value - if self._scan_for_nodes_callback: - # Check if scan is complete before execute callback - scan_complete = False - self._scan_response[message.node_address.value] = True - for node_address in range(0, 64): - if not self._scan_response[node_address]: - if node_address < message.node_address.value: - # Apparently missed response so send new scan request if it's not in queue yet - _LOGGER.debug( - "Resend missing scan request for address %s", - str(node_address), - ) - self.message_sender( - CirclePlusScanRequest(self._mac, node_address) - ) - break - if node_address == 63: - scan_complete = True - if scan_complete and self._scan_for_nodes_callback: - self._scan_for_nodes_callback(self._plugwise_nodes) - self._scan_for_nodes_callback = None - self._plugwise_nodes = {} + await self._available_update_state(False) + return False + await self._available_update_state(True, clock_response.timestamp) - def get_real_time_clock(self, callback=None): - """Get current datetime of internal clock of CirclePlus.""" - self.message_sender( - CirclePlusRealTimeClockGetRequest(self._mac), - callback, - 0, - PRIORITY_LOW, + _dt_of_circle: datetime = datetime.now(tz=UTC).replace( + hour=clock_response.time.value.hour, + minute=clock_response.time.value.minute, + second=clock_response.time.value.second, + microsecond=0, + tzinfo=UTC, ) - - def _response_realtime_clock(self, message): - realtime_clock_dt = datetime( - datetime.now().year, - datetime.now().month, - datetime.now().day, - message.time.value.hour, - message.time.value.minute, - message.time.value.second, + clock_offset = clock_response.timestamp.replace(microsecond=0) - _dt_of_circle + if (clock_offset.seconds < MAX_TIME_DRIFT) or ( + clock_offset.seconds > -(MAX_TIME_DRIFT) + ): + return True + _LOGGER.info( + "Reset realtime clock of node %s because time has drifted %s seconds while max drift is set to %s seconds)", + self._node_info.mac, + str(clock_offset.seconds), + str(MAX_TIME_DRIFT), ) - realtime_clock_offset = message.timestamp.replace(microsecond=0) - ( - realtime_clock_dt + self.timezone_delta + clock_set_request = CirclePlusRealTimeClockSetRequest( + self._send, self._mac_in_bytes, datetime.now(tz=UTC) ) - if realtime_clock_offset.days == -1: - self._realtime_clock_offset = realtime_clock_offset.seconds - 86400 - else: - self._realtime_clock_offset = realtime_clock_offset.seconds - _LOGGER.debug( - "Realtime clock of node %s has drifted %s sec", - self.mac, - str(self._clock_offset), + if (node_response := await clock_set_request.send()) is not None: + return node_response.ack_id == NodeResponseType.CLOCK_ACCEPTED + _LOGGER.warning( + "Failed to (re)set the internal realtime clock of %s", + self.name, ) - - def set_real_time_clock(self, callback=None): - """Set internal clock of CirclePlus.""" - self.message_sender( - CirclePlusRealTimeClockSetRequest(self._mac, datetime.utcnow()), - callback, - ) - - def sync_realtime_clock(self, max_drift=0): - """Sync real time clock of node if time has drifted more than max drifted.""" - if self._realtime_clock_offset is not None: - if max_drift == 0: - max_drift = MAX_TIME_DRIFT - if (self._realtime_clock_offset > max_drift) or ( - self._realtime_clock_offset < -(max_drift) - ): - _LOGGER.info( - "Reset realtime clock of node %s because time has drifted %s sec", - self.mac, - str(self._clock_offset), - ) - 
self.set_real_time_clock() + return False diff --git a/plugwise_usb/nodes/helpers/__init__.py b/plugwise_usb/nodes/helpers/__init__.py new file mode 100644 index 000000000..1ef0b8a86 --- /dev/null +++ b/plugwise_usb/nodes/helpers/__init__.py @@ -0,0 +1,33 @@ +"""Helpers for Plugwise nodes.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from functools import wraps +from typing import Any, TypeVar, cast + +from ...exceptions import NodeError + + +@dataclass +class EnergyCalibration: + """Definition of a calibration for Plugwise devices (Circle, Stealth).""" + + gain_a: float + gain_b: float + off_noise: float + off_tot: float + + +FuncT = TypeVar("FuncT", bound=Callable[..., Any]) + + +def raise_not_loaded(func: FuncT) -> FuncT: + """Raise NodeError when node is not loaded.""" + @wraps(func) + def decorated(*args: Any, **kwargs: Any) -> Any: + if not args[0].is_loaded: + raise NodeError(f"Node {args[0].mac} is not loaded yet") + return func(*args, **kwargs) + return cast(FuncT, decorated) diff --git a/plugwise_usb/nodes/helpers/cache.py b/plugwise_usb/nodes/helpers/cache.py new file mode 100644 index 000000000..b234638e7 --- /dev/null +++ b/plugwise_usb/nodes/helpers/cache.py @@ -0,0 +1,59 @@ +"""Caching for plugwise node.""" + +from __future__ import annotations + +import logging + +from ...helpers.cache import PlugwiseCache + +_LOGGER = logging.getLogger(__name__) + + +class NodeCache(PlugwiseCache): + """Class to cache specific node configuration and states.""" + + def __init__(self, mac: str, cache_root_dir: str = "") -> None: + """Initialize NodeCache class.""" + self._mac = mac + self._node_cache_file_name = f"{mac}.cache" + super().__init__(self._node_cache_file_name, cache_root_dir) + self._states: dict[str, str] = {} + + @property + def states(self) -> dict[str, str]: + """Cached node state information.""" + return self._states + + def update_state(self, state: str, value: str) -> None: + """Add configuration state to cache.""" + self._states[state] = value + + def remove_state(self, state: str) -> None: + """Remove configuration state from cache.""" + if self._states.get(state) is not None: + self._states.pop(state) + + def get_state(self, state: str) -> str | None: + """Return current value for state.""" + return self._states.get(state, None) + + async def save_cache(self, rewrite: bool = False) -> None: + """Save the node configuration to file.""" + await self.write_cache(self._states, rewrite) + _LOGGER.debug( + "Cached settings saved to cache file %s", + str(self._cache_file), + ) + + async def clear_cache(self) -> None: + """Clear current cache.""" + self._states = {} + await self.delete_cache() + + async def restore_cache(self) -> bool: + """Load the previously store state information.""" + data: dict[str, str] = await self.read_cache() + self._states.clear() + for key, value in data.items(): + self._states[key] = value + return True diff --git a/plugwise_usb/nodes/helpers/counter.py b/plugwise_usb/nodes/helpers/counter.py new file mode 100644 index 000000000..0e16e08b5 --- /dev/null +++ b/plugwise_usb/nodes/helpers/counter.py @@ -0,0 +1,339 @@ +"""Energy counter.""" + +from __future__ import annotations + +from datetime import datetime #, timedelta +from enum import Enum, auto +import logging +from typing import Final + +from ...api import EnergyStatistics +from ...constants import HOUR_IN_SECONDS, LOCAL_TIMEZONE, PULSES_PER_KW_SECOND +from ...exceptions import EnergyError +from ..helpers import 
EnergyCalibration +from .pulses import PulseCollection, PulseLogRecord + + +class EnergyType(Enum): + """Energy collection types.""" + + CONSUMPTION_HOUR = auto() + PRODUCTION_HOUR = auto() + CONSUMPTION_DAY = auto() + PRODUCTION_DAY = auto() + CONSUMPTION_WEEK = auto() + PRODUCTION_WEEK = auto() + + +ENERGY_COUNTERS: Final = ( + EnergyType.CONSUMPTION_HOUR, + EnergyType.PRODUCTION_HOUR, + EnergyType.CONSUMPTION_DAY, + EnergyType.PRODUCTION_DAY, + # EnergyType.CONSUMPTION_WEEK, + # EnergyType.PRODUCTION_WEEK, +) +ENERGY_HOUR_COUNTERS: Final = ( + EnergyType.CONSUMPTION_HOUR, + EnergyType.PRODUCTION_HOUR, +) +ENERGY_DAY_COUNTERS: Final = ( + EnergyType.CONSUMPTION_DAY, + EnergyType.PRODUCTION_DAY, +) +ENERGY_WEEK_COUNTERS: Final = ( + EnergyType.CONSUMPTION_WEEK, + EnergyType.PRODUCTION_WEEK, +) + +ENERGY_CONSUMPTION_COUNTERS: Final = ( + EnergyType.CONSUMPTION_HOUR, + EnergyType.CONSUMPTION_DAY, + # EnergyType.CONSUMPTION_WEEK, +) +ENERGY_PRODUCTION_COUNTERS: Final = ( + EnergyType.PRODUCTION_HOUR, + EnergyType.PRODUCTION_DAY, + # EnergyType.PRODUCTION_WEEK, +) + +_LOGGER = logging.getLogger(__name__) + + +class EnergyCounters: + """Hold all energy counters.""" + + def __init__(self, mac: str) -> None: + """Initialize EnergyCounter class.""" + self._mac = mac + self._calibration: EnergyCalibration | None = None + self._counters: dict[EnergyType, EnergyCounter] = {} + for energy_type in ENERGY_COUNTERS: + self._counters[energy_type] = EnergyCounter(energy_type, mac) + self._pulse_collection = PulseCollection(mac) + self._energy_statistics = EnergyStatistics() + + @property + def collected_logs(self) -> int: + """Total collected logs.""" + return self._pulse_collection.collected_logs + + def add_empty_log(self, address: int, slot: int) -> None: + """Add empty energy log record to mark any start of beginning of energy log collection.""" + self._pulse_collection.add_empty_log(address, slot) + + def add_pulse_log( # pylint: disable=too-many-arguments + self, + address: int, + slot: int, + timestamp: datetime, + pulses: int, + import_only: bool = False, + ) -> None: + """Add pulse log.""" + if self._pulse_collection.add_log( + address, slot, timestamp, pulses, import_only + ): + if not import_only: + self.update() + + def get_pulse_logs(self) -> dict[int, dict[int, PulseLogRecord]]: + """Return currently collected pulse logs.""" + return self._pulse_collection.logs + + def add_pulse_stats( + self, pulses_consumed: int, pulses_produced: int, timestamp: datetime + ) -> None: + """Add pulse statistics.""" + _LOGGER.debug("add_pulse_stats for %s with timestamp=%s", self._mac, timestamp) + _LOGGER.debug("consumed=%s | produced=%s", pulses_consumed, pulses_produced) + self._pulse_collection.update_pulse_counter( + pulses_consumed, pulses_produced, timestamp + ) + self.update() + + @property + def energy_statistics(self) -> EnergyStatistics: + """Return collection with energy statistics.""" + return self._energy_statistics + + @property + def consumption_interval(self) -> int | None: + """Measurement interval for energy consumption.""" + return self._pulse_collection.log_interval_consumption + + @property + def production_interval(self) -> int | None: + """Measurement interval for energy production.""" + return self._pulse_collection.log_interval_production + + @property + def log_addresses_missing(self) -> list[int] | None: + """Return list of addresses of energy logs.""" + return self._pulse_collection.log_addresses_missing + + @property + def log_rollover(self) -> bool: + """Indicate if new log is 
required due to rollover.""" + return self._pulse_collection.log_rollover + + @property + def calibration(self) -> EnergyCalibration | None: + """Energy calibration configuration.""" + return self._calibration + + @calibration.setter + def calibration(self, calibration: EnergyCalibration) -> None: + """Energy calibration configuration.""" + for node_event in ENERGY_COUNTERS: + self._counters[node_event].calibration = calibration + self._calibration = calibration + + def update(self) -> None: + """Update counter collection.""" + self._pulse_collection.recalculate_missing_log_addresses() + if self._calibration is None: + return + + self._energy_statistics.log_interval_consumption = ( + self._pulse_collection.log_interval_consumption + ) + ( + self._energy_statistics.hour_consumption, + self._energy_statistics.hour_consumption_reset, + ) = self._counters[EnergyType.CONSUMPTION_HOUR].update(self._pulse_collection) + ( + self._energy_statistics.day_consumption, + self._energy_statistics.day_consumption_reset, + ) = self._counters[EnergyType.CONSUMPTION_DAY].update(self._pulse_collection) + # ( + # self._energy_statistics.week_consumption, + # self._energy_statistics.week_consumption_reset, + # ) = self._counters[EnergyType.CONSUMPTION_WEEK].update(self._pulse_collection) + + if self._pulse_collection.production_logging: + self._energy_statistics.log_interval_production = ( + self._pulse_collection.log_interval_production + ) + ( + self._energy_statistics.hour_production, + self._energy_statistics.hour_production_reset, + ) = self._counters[EnergyType.PRODUCTION_HOUR].update(self._pulse_collection) + ( + self._energy_statistics.day_production, + self._energy_statistics.day_production_reset, + ) = self._counters[EnergyType.PRODUCTION_DAY].update(self._pulse_collection) + # ( + # self._energy_statistics.week_production, + # self._energy_statistics.week_production_reset, + # ) = self._counters[EnergyType.PRODUCTION_WEEK].update(self._pulse_collection) + + @property + def timestamp(self) -> datetime | None: + """Return the last valid timestamp or None.""" + if self._calibration is None: + return None + if self._pulse_collection.log_addresses_missing is None: + return None + if len(self._pulse_collection.log_addresses_missing) > 0: + return None + return self._pulse_collection.last_update + + +class EnergyCounter: + """Energy counter to convert pulses into energy.""" + + def __init__( + self, + energy_id: EnergyType, + mac: str, + ) -> None: + """Initialize energy counter based on energy id.""" + self._mac = mac + if energy_id not in ENERGY_COUNTERS: + raise EnergyError(f"Invalid energy id '{energy_id}' for Energy counter") + + self._calibration: EnergyCalibration | None = None + self._duration = "hour" + if energy_id in ENERGY_DAY_COUNTERS: + self._duration = "day" + #elif energy_id in ENERGY_WEEK_COUNTERS: + # self._duration = "week" + + self._energy_id: EnergyType = energy_id + self._is_consumption = True + self._direction = "consumption" + if self._energy_id in ENERGY_PRODUCTION_COUNTERS: + self._direction = "production" + self._is_consumption = False + + self._last_reset: datetime | None = None + self._last_update: datetime | None = None + self._pulses: int | None = None + + @property + def direction(self) -> str: + """Energy direction (consumption or production).""" + return self._direction + + @property + def duration(self) -> str: + """Energy time span.""" + return self._duration + + @property + def calibration(self) -> EnergyCalibration | None: + """Energy calibration configuration.""" + 
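+        # None until calibration data has been assigned via the setter below.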
return self._calibration + + @calibration.setter + def calibration(self, calibration: EnergyCalibration) -> None: + """Energy calibration configuration.""" + self._calibration = calibration + + @property + def is_consumption(self) -> bool: + """Indicate the energy direction.""" + return self._is_consumption + + @property + def energy(self) -> float | None: + """Total energy (in kWh) since last reset.""" + if self._pulses is None or self._calibration is None: + return None + + if self._pulses == 0: + return 0.0 + + # Handle both positive and negative pulses values + negative = False + if self._pulses < 0: + negative = True + + pulses_per_s = abs(self._pulses) / float(HOUR_IN_SECONDS) + corrected_pulses = HOUR_IN_SECONDS * ( + ( + ( + ((pulses_per_s + self._calibration.off_noise) ** 2) + * self._calibration.gain_b + ) + + ( + (pulses_per_s + self._calibration.off_noise) + * self._calibration.gain_a + ) + ) + + self._calibration.off_tot + ) + calc_value = corrected_pulses / PULSES_PER_KW_SECOND / HOUR_IN_SECONDS + if negative: + calc_value = -calc_value + + return calc_value + + @property + def last_reset(self) -> datetime | None: + """Last reset of energy counter.""" + return self._last_reset + + @property + def last_update(self) -> datetime | None: + """Last update of energy counter.""" + return self._last_update + + def update( + self, pulse_collection: PulseCollection + ) -> tuple[float | None, datetime | None]: + """Get pulse update.""" + last_reset = datetime.now(tz=LOCAL_TIMEZONE) + if self._energy_id in ENERGY_HOUR_COUNTERS: + last_reset = last_reset.replace(minute=0, second=0, microsecond=0) + elif self._energy_id in ENERGY_DAY_COUNTERS: + last_reset = last_reset.replace(hour=0, minute=0, second=0, microsecond=0) + # elif self._energy_id in ENERGY_WEEK_COUNTERS: + # last_reset = last_reset - timedelta(days=last_reset.weekday()) + # last_reset = last_reset.replace( + # hour=0, + # minute=0, + # second=0, + # microsecond=0, + # ) + + pulses, last_update = pulse_collection.collected_pulses( + last_reset, self._is_consumption + ) + _LOGGER.debug( + "Counter Update | %s | pulses=%s | last_update=%s", + self._mac, + pulses, + last_update, + ) + if pulses is None or last_update is None: + return (None, None) + + self._last_update = last_update + self._last_reset = last_reset + self._pulses = pulses + + energy = self.energy + _LOGGER.debug("energy=%s on last_update=%s", energy, last_update) + return (energy, last_reset) diff --git a/plugwise_usb/nodes/helpers/firmware.py b/plugwise_usb/nodes/helpers/firmware.py new file mode 100644 index 000000000..471f1bb81 --- /dev/null +++ b/plugwise_usb/nodes/helpers/firmware.py @@ -0,0 +1,169 @@ +"""Firmware protocol support definitions. + +The minimum and maximum supported (custom) zigbee protocol versions +are based on the utc timestamp of firmware. + +The data is extracted from analyzing the "Plugwise.IO.dll" file of +the Plugwise source installation. 
+ +""" + +from __future__ import annotations + +from datetime import UTC, datetime +from typing import Final, NamedTuple + +from ...api import NodeFeature + + +class SupportedVersions(NamedTuple): + """Range of supported version.""" + + min: float + max: float + + +# region - node firmware versions +CIRCLE_FIRMWARE_SUPPORT: Final = { + datetime(2008, 8, 26, 15, 46, tzinfo=UTC): SupportedVersions(min=1.0, max=1.1), + datetime(2009, 9, 8, 13, 50, 31, tzinfo=UTC): SupportedVersions(min=2.0, max=2.4), + datetime(2010, 4, 27, 11, 56, 23, tzinfo=UTC): SupportedVersions(min=2.0, max=2.4), + datetime(2010, 8, 4, 14, 9, 6, tzinfo=UTC): SupportedVersions(min=2.0, max=2.4), + datetime(2010, 8, 17, 7, 40, 37, tzinfo=UTC): SupportedVersions(min=2.0, max=2.4), + datetime(2010, 8, 31, 5, 55, 19, tzinfo=UTC): SupportedVersions(min=2.0, max=2.5), + datetime(2010, 8, 31, 10, 21, 2, tzinfo=UTC): SupportedVersions(min=2.0, max=2.4), + datetime(2010, 10, 7, 14, 46, 38, tzinfo=UTC): SupportedVersions(min=2.0, max=2.4), + datetime(2010, 11, 1, 13, 29, 38, tzinfo=UTC): SupportedVersions(min=2.0, max=2.4), + datetime(2011, 3, 25, 17, 40, 20, tzinfo=UTC): SupportedVersions(min=2.0, max=2.5), + datetime(2011, 5, 13, 7, 19, 23, tzinfo=UTC): SupportedVersions(min=2.0, max=2.5), + datetime(2011, 6, 27, 8, 52, 18, tzinfo=UTC): SupportedVersions(min=2.0, max=2.5), + # Legrand + datetime(2011, 11, 3, 12, 57, 57, tzinfo=UTC): SupportedVersions(min=2.0, max=2.6), + # Radio Test + datetime(2012, 4, 19, 14, 0, 42, tzinfo=UTC): SupportedVersions(min=2.0, max=2.5), + # Beta release + datetime(2015, 6, 18, 14, 42, 54, tzinfo=UTC): SupportedVersions(min=2.0, max=2.6), + # Proto release + datetime(2015, 6, 16, 21, 9, 10, tzinfo=UTC): SupportedVersions(min=2.0, max=2.6), + datetime(2015, 6, 18, 14, 0, 54, tzinfo=UTC): SupportedVersions(min=2.0, max=2.6), + # New Flash Update + datetime(2017, 7, 11, 16, 6, 59, tzinfo=UTC): SupportedVersions(min=2.0, max=2.6), +} + +CIRCLE_PLUS_FIRMWARE_SUPPORT: Final = { + datetime(2008, 8, 26, 15, 46, tzinfo=UTC): SupportedVersions(min=1.0, max=1.1), + datetime(2009, 9, 8, 14, 0, 32, tzinfo=UTC): SupportedVersions(min=2.0, max=2.4), + datetime(2010, 4, 27, 11, 54, 15, tzinfo=UTC): SupportedVersions(min=2.0, max=2.4), + datetime(2010, 8, 4, 12, 56, 59, tzinfo=UTC): SupportedVersions(min=2.0, max=2.4), + datetime(2010, 8, 17, 7, 37, 57, tzinfo=UTC): SupportedVersions(min=2.0, max=2.4), + datetime(2010, 8, 31, 10, 9, 18, tzinfo=UTC): SupportedVersions(min=2.0, max=2.4), + datetime(2010, 10, 7, 14, 49, 29, tzinfo=UTC): SupportedVersions(min=2.0, max=2.4), + datetime(2010, 11, 1, 13, 24, 49, tzinfo=UTC): SupportedVersions(min=2.0, max=2.4), + datetime(2011, 3, 25, 17, 37, 55, tzinfo=UTC): SupportedVersions(min=2.0, max=2.5), + datetime(2011, 5, 13, 7, 17, 7, tzinfo=UTC): SupportedVersions(min=2.0, max=2.5), + datetime(2011, 6, 27, 8, 47, 37, tzinfo=UTC): SupportedVersions(min=2.0, max=2.5), + # Legrand + datetime(2011, 11, 3, 12, 55, 23, tzinfo=UTC): SupportedVersions(min=2.0, max=2.6), + # Radio Test + datetime(2012, 4, 19, 14, 3, 55, tzinfo=UTC): SupportedVersions(min=2.0, max=2.5), + # SMA firmware 2015-06-16 + datetime(2015, 6, 18, 14, 42, 54, tzinfo=UTC): SupportedVersions(min=2.0, max=2.6), + # New Flash Update + datetime(2017, 7, 11, 16, 5, 57, tzinfo=UTC): SupportedVersions(min=2.0, max=2.6), +} + +SCAN_FIRMWARE_SUPPORT: Final = { + datetime(2010, 11, 4, 16, 58, 46, tzinfo=UTC): SupportedVersions(min=2.0, max=2.6), + # Beta Scan Release + datetime(2011, 1, 12, 8, 32, 56, tzinfo=UTC): 
SupportedVersions(min=2.0, max=2.5), + # Beta Scan Release + datetime(2011, 3, 4, 14, 43, 31, tzinfo=UTC): SupportedVersions(min=2.0, max=2.5), + # Scan RC1 + datetime(2011, 3, 28, 9, 0, 24, tzinfo=UTC): SupportedVersions(min=2.0, max=2.5), + datetime(2011, 5, 13, 7, 21, 55, tzinfo=UTC): SupportedVersions(min=2.0, max=2.5), + datetime(2011, 11, 3, 13, 0, 56, tzinfo=UTC): SupportedVersions(min=2.0, max=2.6), + # Legrand + datetime(2011, 6, 27, 8, 55, 44, tzinfo=UTC): SupportedVersions(min=2.0, max=2.5), + datetime(2017, 7, 11, 16, 8, 3, tzinfo=UTC): SupportedVersions(min=2.0, max=2.6), + # New Flash Update +} + +SENSE_FIRMWARE_SUPPORT: Final = { + # pre - internal test release - fixed version + datetime(2010, 12, 3, 10, 17, 7, tzinfo=UTC): SupportedVersions(min=2.0, max=2.5), + # Proto release, with reset and join bug fixed + datetime(2011, 1, 11, 14, 19, 36, tzinfo=UTC): SupportedVersions(min=2.0, max=2.5), + datetime(2011, 3, 4, 14, 52, 30, tzinfo=UTC): SupportedVersions(min=2.0, max=2.5), + datetime(2011, 3, 25, 17, 43, 2, tzinfo=UTC): SupportedVersions(min=2.0, max=2.5), + datetime(2011, 5, 13, 7, 24, 26, tzinfo=UTC): SupportedVersions(min=2.0, max=2.5), + datetime(2011, 6, 27, 8, 58, 19, tzinfo=UTC): SupportedVersions(min=2.0, max=2.5), + # Legrand + datetime(2011, 11, 3, 13, 7, 33, tzinfo=UTC): SupportedVersions(min=2.0, max=2.6), + # Radio Test + datetime(2012, 4, 19, 14, 10, 48, tzinfo=UTC): SupportedVersions(min=2.0, max=2.5), + # New Flash Update + datetime(2017, 7, 11, 16, 9, 5, tzinfo=UTC): SupportedVersions(min=2.0, max=2.6), +} + +SWITCH_FIRMWARE_SUPPORT: Final = { + datetime(2009, 9, 8, 14, 7, 4, tzinfo=UTC): SupportedVersions(min=2.0, max=2.4), + datetime(2010, 1, 16, 14, 7, 13, tzinfo=UTC): SupportedVersions(min=2.0, max=2.4), + datetime(2010, 4, 27, 11, 59, 31, tzinfo=UTC): SupportedVersions(min=2.0, max=2.4), + datetime(2010, 8, 4, 14, 15, 25, tzinfo=UTC): SupportedVersions(min=2.0, max=2.4), + datetime(2010, 8, 17, 7, 44, 24, tzinfo=UTC): SupportedVersions(min=2.0, max=2.4), + datetime(2010, 8, 31, 10, 23, 32, tzinfo=UTC): SupportedVersions(min=2.0, max=2.4), + datetime(2010, 10, 7, 14, 29, 55, tzinfo=UTC): SupportedVersions(min=2.0, max=2.4), + datetime(2010, 11, 1, 13, 41, 30, tzinfo=UTC): SupportedVersions(min=2.0, max=2.4), + datetime(2011, 3, 25, 17, 46, 41, tzinfo=UTC): SupportedVersions(min=2.0, max=2.5), + datetime(2011, 5, 13, 7, 26, 54, tzinfo=UTC): SupportedVersions(min=2.0, max=2.5), + datetime(2011, 6, 27, 9, 4, 10, tzinfo=UTC): SupportedVersions(min=2.0, max=2.5), + # Legrand + datetime(2011, 11, 3, 13, 10, 18, tzinfo=UTC): SupportedVersions(min=2.0, max=2.6), + # Radio Test + datetime(2012, 4, 19, 14, 10, 48, tzinfo=UTC): SupportedVersions(min=2.0, max=2.5), + # New Flash Update + datetime(2017, 7, 11, 16, 11, 10, tzinfo=UTC): SupportedVersions(min=2.0, max=2.6), +} + +CELSIUS_FIRMWARE_SUPPORT: Final = { + # Celsius Proto + datetime(2013, 9, 25, 15, 9, 44, tzinfo=UTC): SupportedVersions(min=2.0, max=2.6), + datetime(2013, 10, 11, 15, 15, 58, tzinfo=UTC): SupportedVersions(min=2.0, max=2.6), + datetime(2013, 10, 17, 10, 13, 12, tzinfo=UTC): SupportedVersions(min=2.0, max=2.6), + datetime(2013, 11, 19, 17, 35, 48, tzinfo=UTC): SupportedVersions(min=2.0, max=2.6), + datetime(2013, 12, 5, 16, 25, 33, tzinfo=UTC): SupportedVersions(min=2.0, max=2.6), + datetime(2013, 12, 11, 10, 53, 55, tzinfo=UTC): SupportedVersions(min=2.0, max=2.6), + datetime(2014, 1, 30, 8, 56, 21, tzinfo=UTC): SupportedVersions(min=2.0, max=2.6), + datetime(2014, 2, 3, 10, 9, 27, 
tzinfo=UTC): SupportedVersions(min=2.0, max=2.6), + datetime(2014, 3, 7, 16, 7, 42, tzinfo=UTC): SupportedVersions(min=2.0, max=2.6), + datetime(2014, 3, 24, 11, 12, 23, tzinfo=UTC): SupportedVersions(min=2.0, max=2.6), + # MSPBootloader Image - Required to allow + # a MSPBootload image for OTA update + datetime(2014, 4, 14, 15, 45, 26, tzinfo=UTC): SupportedVersions(min=2.0, max=2.6), + # CelsiusV Image + datetime(2014, 7, 23, 19, 24, 18, tzinfo=UTC): SupportedVersions(min=2.0, max=2.6), + # CelsiusV Image + datetime(2014, 9, 12, 11, 36, 40, tzinfo=UTC): SupportedVersions(min=2.0, max=2.6), + # New Flash Update + datetime(2017, 7, 11, 16, 2, 50, tzinfo=UTC): SupportedVersions(min=2.0, max=2.6), +} + +# endregion + +# region - node firmware based features + +FEATURE_SUPPORTED_AT_FIRMWARE: Final = { + NodeFeature.BATTERY: 2.0, + NodeFeature.INFO: 2.0, + NodeFeature.TEMPERATURE: 2.0, + NodeFeature.HUMIDITY: 2.0, + NodeFeature.ENERGY: 2.0, + NodeFeature.POWER: 2.0, + NodeFeature.RELAY: 2.0, + NodeFeature.RELAY_INIT: 2.6, + NodeFeature.MOTION: 2.0, + NodeFeature.MOTION_CONFIG: 2.0, + NodeFeature.SWITCH: 2.0, +} + +# endregion diff --git a/plugwise_usb/nodes/helpers/pulses.py b/plugwise_usb/nodes/helpers/pulses.py new file mode 100644 index 000000000..4db88920c --- /dev/null +++ b/plugwise_usb/nodes/helpers/pulses.py @@ -0,0 +1,1008 @@ +"""Energy pulse helper.""" + +from __future__ import annotations + +from dataclasses import dataclass +from datetime import UTC, datetime, timedelta +import logging +from typing import Final + +from ...constants import LOGADDR_MAX, MINUTE_IN_SECONDS, WEEK_IN_HOURS +from ...exceptions import EnergyError + +_LOGGER = logging.getLogger(__name__) +CONSUMED: Final = True +PRODUCED: Final = False +PRODUCERS: tuple[str] = ("000D6F00029C32C7") + +MAX_LOG_HOURS = WEEK_IN_HOURS + + +def calc_log_address(address: int, slot: int, offset: int) -> tuple[int, int]: + """Calculate address and slot for log based for specified offset.""" + + if offset < 0: + while offset + slot < 1: + address -= 1 + # Check for log address rollover + if address <= -1: + address = LOGADDR_MAX - 1 + offset += 4 + if offset > 0: + while offset + slot > 4: + address += 1 + # Check for log address rollover + if address >= LOGADDR_MAX: + address = 0 + offset -= 4 + return (address, slot + offset) + + +@dataclass +class PulseLogRecord: + """Total pulses collected at specific timestamp.""" + + timestamp: datetime + pulses: int + is_consumption: bool + + +class PulseCollection: + """Store consumed and produced energy pulses of the current interval and past (history log) intervals.""" + + def __init__(self, mac: str) -> None: + """Initialize PulseCollection class.""" + self._mac = mac + self._log_interval_consumption: int | None = None + self._log_interval_production: int | None = None + + self._last_log_address: int | None = None + self._last_log_slot: int | None = None + self._last_log_timestamp: datetime | None = None + self._first_log_address: int | None = None + self._first_log_slot: int | None = None + self._first_log_timestamp: datetime | None = None + + self._first_empty_log_address: int | None = None + self._first_empty_log_slot: int | None = None + self._last_empty_log_address: int | None = None + self._last_empty_log_slot: int | None = None + + self._last_log_consumption_timestamp: datetime | None = None + self._last_log_consumption_address: int | None = None + self._last_log_consumption_slot: int | None = None + self._first_log_consumption_timestamp: datetime | None = None + 
self._first_log_consumption_address: int | None = None + self._first_log_consumption_slot: int | None = None + self._next_log_consumption_timestamp: datetime | None = None + + self._last_log_production_timestamp: datetime | None = None + self._last_log_production_address: int | None = None + self._last_log_production_slot: int | None = None + self._first_log_production_timestamp: datetime | None = None + self._first_log_production_address: int | None = None + self._first_log_production_slot: int | None = None + self._next_log_production_timestamp: datetime | None = None + + self._rollover_consumption = False + self._rollover_production = False + + self._logs: dict[int, dict[int, PulseLogRecord]] | None = None + self._log_addresses_missing: list[int] | None = None + self._pulses_consumption: int | None = None + self._pulses_production: int | None = None + self._pulses_timestamp: datetime | None = None + + self._log_production = False # : bool | None = None + if mac in PRODUCERS: + self._log_production = True + + @property + def collected_logs(self) -> int: + """Total collected logs.""" + counter = 0 + if self._logs is None: + return counter + for address in self._logs: + counter += len(self._logs[address]) + return counter + + @property + def logs(self) -> dict[int, dict[int, PulseLogRecord]]: + """Return currently collected pulse logs in reversed order.""" + if self._logs is None: + return {} + sorted_log: dict[int, dict[int, PulseLogRecord]] = {} + skip_before = datetime.now(tz=UTC) - timedelta(hours=MAX_LOG_HOURS) + sorted_addresses = sorted(self._logs.keys(), reverse=True) + for address in sorted_addresses: + sorted_slots = sorted(self._logs[address].keys(), reverse=True) + for slot in sorted_slots: + if self._logs[address][slot].timestamp > skip_before: + if sorted_log.get(address) is None: + sorted_log[address] = {} + sorted_log[address][slot] = self._logs[address][slot] + return sorted_log + + @property + def last_log(self) -> tuple[int, int] | None: + """Return address and slot of last imported log.""" + if ( + self._last_log_consumption_address is None + or self._last_log_consumption_slot is None + ): + return None + return (self._last_log_consumption_address, self._last_log_consumption_slot) + + @property + def production_logging(self) -> bool | None: + """Indicate if production logging is active.""" + return self._log_production + + @property + def log_interval_consumption(self) -> int | None: + """Interval in minutes between last consumption pulse logs.""" + return self._log_interval_consumption + + @property + def log_interval_production(self) -> int | None: + """Interval in minutes between last production pulse logs.""" + return self._log_interval_production + + @property + def log_rollover(self) -> bool: + """Indicate if new log is required.""" + return self._rollover_consumption or self._rollover_production + + @property + def last_update(self) -> datetime | None: + """Return timestamp of last update.""" + return self._pulses_timestamp + + def collected_pulses( + self, from_timestamp: datetime, is_consumption: bool + ) -> tuple[int | None, datetime | None]: + """Calculate total pulses from given timestamp.""" + _LOGGER.debug( + "collected_pulses 1 | %s | is_cons=%s, from_timestamp=%s", + self._mac, + is_consumption, + from_timestamp, + ) + _LOGGER.debug("collected_pulses 1a | _log_production=%s", self._log_production) + if not is_consumption: + if self._log_production is None or not self._log_production: + return (None, None) + + if is_consumption and 
self._rollover_consumption: + _LOGGER.debug("collected_pulses 2 | %s | _rollover_consumption", self._mac) + return (None, None) + if not is_consumption and self._rollover_production: + _LOGGER.debug("collected_pulses 2 | %s | _rollover_production", self._mac) + return (None, None) + + if ( + log_pulses := self._collect_pulses_from_logs(from_timestamp, is_consumption) + ) is None: + _LOGGER.debug("collected_pulses 3 | %s | log_pulses:None", self._mac) + return (None, None) + + pulses: int | None = None + timestamp: datetime | None = None + if is_consumption and self._pulses_consumption is not None: + pulses = self._pulses_consumption + timestamp = self._pulses_timestamp + if not is_consumption and self._pulses_production is not None: + pulses = self._pulses_production + timestamp = self._pulses_timestamp + # _LOGGER.debug("collected_pulses | %s | pulses=%s", self._mac, pulses) + + if pulses is None: + _LOGGER.debug( + "collected_pulses 4 | %s | is_consumption=%s, pulses=None", + self._mac, + is_consumption, + ) + return (None, None) + _LOGGER.debug( + "collected_pulses 5 | pulses=%s | log_pulses=%s | consumption=%s at timestamp=%s", + pulses, + log_pulses, + is_consumption, + timestamp, + ) + return (pulses + log_pulses, timestamp) + + def _collect_pulses_from_logs( + self, from_timestamp: datetime, is_consumption: bool + ) -> int | None: + """Collect all pulses from logs.""" + if self._logs is None: + _LOGGER.debug("_collect_pulses_from_logs | %s | self._logs=None", self._mac) + return None + if is_consumption: + if self._last_log_consumption_timestamp is None: + _LOGGER.debug( + "_collect_pulses_from_logs | %s | self._last_log_consumption_timestamp=None", + self._mac, + ) + return None + if from_timestamp > self._last_log_consumption_timestamp: + return 0 + else: + if self._last_log_production_timestamp is None: + _LOGGER.debug( + "_collect_pulses_from_logs | %s | self._last_log_production_timestamp=None", + self._mac, + ) + return None + if from_timestamp > self._last_log_production_timestamp: + return 0 + + missing_logs = self._logs_missing(from_timestamp) + if missing_logs is None or missing_logs: + _LOGGER.debug( + "_collect_pulses_from_logs | %s | missing_logs=%s", + self._mac, + missing_logs, + ) + return None + + log_pulses = 0 + + for log_item in self._logs.values(): + for slot_item in log_item.values(): + if ( + slot_item.is_consumption == is_consumption + and slot_item.timestamp > from_timestamp + ): + log_pulses += slot_item.pulses + return log_pulses + + def update_pulse_counter( + self, pulses_consumed: int, pulses_produced: int, timestamp: datetime + ) -> None: + """Update pulse counter.""" + self._pulses_timestamp = timestamp + self._update_rollover() + if not (self._rollover_consumption or self._rollover_production): + # No rollover based on time, check rollover based on counter reset + # Required for special cases like nodes which have been power off for several days + if ( + self._pulses_consumption is not None + and self._pulses_consumption > pulses_consumed + ): + self._rollover_consumption = True + _LOGGER.debug( + "_rollover_consumption | self._pulses_consumption=%s > pulses_consumed=%s", + self._pulses_consumption, + pulses_consumed, + ) + + if ( + self._pulses_production is not None + and self._pulses_production < pulses_produced + ): + self._rollover_production = True + _LOGGER.debug( + "_rollover_production | self._pulses_production=%s < pulses_produced=%s", + self._pulses_production, + pulses_produced, + ) + + self._pulses_consumption = pulses_consumed + 
self._pulses_production = pulses_produced + + def _update_rollover(self) -> None: + """Update rollover states. Returns True if rollover is applicable.""" + if self._log_addresses_missing is not None and self._log_addresses_missing: + return + if ( + self._pulses_timestamp is None + or self._last_log_consumption_timestamp is None + or self._next_log_consumption_timestamp is None + ): + # Unable to determine rollover + return + if self._pulses_timestamp > self._next_log_consumption_timestamp: + self._rollover_consumption = True + _LOGGER.debug( + "_update_rollover | %s | set consumption rollover => pulses newer", + self._mac, + ) + elif self._pulses_timestamp < self._last_log_consumption_timestamp: + self._rollover_consumption = True + _LOGGER.debug( + "_update_rollover | %s | set consumption rollover => log newer", + self._mac, + ) + elif ( + self._last_log_consumption_timestamp + < self._pulses_timestamp + < self._next_log_consumption_timestamp + ): + if self._rollover_consumption: + _LOGGER.debug("_update_rollover | %s | reset consumption", self._mac) + self._rollover_consumption = False + else: + _LOGGER.debug("_update_rollover | %s | unexpected consumption", self._mac) + + if not self._log_production: + return + + if ( + self._last_log_production_timestamp is None + or self._next_log_production_timestamp is None + ): + # Unable to determine rollover + return + if self._pulses_timestamp > self._next_log_production_timestamp: + self._rollover_production = True + _LOGGER.debug( + "_update_rollover | %s | set production rollover => pulses newer", + self._mac, + ) + elif self._pulses_timestamp < self._last_log_production_timestamp: + self._rollover_production = True + _LOGGER.debug( + "_update_rollover | %s | reset production rollover => log newer", + self._mac, + ) + elif ( + self._last_log_production_timestamp + < self._pulses_timestamp + < self._next_log_production_timestamp + ): + if self._rollover_production: + _LOGGER.debug("_update_rollover | %s | reset production", self._mac) + self._rollover_production = False + else: + _LOGGER.debug("_update_rollover | %s | unexpected production", self._mac) + + def add_empty_log(self, address: int, slot: int) -> None: + """Add empty energy log record to mark any start of beginning of energy log collection.""" + recalculate = False + if self._first_log_address is None or address <= self._first_log_address: + if ( + self._first_empty_log_address is None + or self._first_empty_log_address < address + ): + self._first_empty_log_address = address + self._first_empty_log_slot = slot + recalculate = True + elif self._first_empty_log_address == address and ( + self._first_empty_log_slot is None or self._first_empty_log_slot < slot + ): + self._first_empty_log_slot = slot + recalculate = True + + if self._last_log_address is None or address >= self._last_log_address: + if ( + self._last_empty_log_address is None + or self._last_empty_log_address > address + ): + self._last_empty_log_address = address + self._last_empty_log_slot = slot + recalculate = True + elif self._last_empty_log_address == address and ( + self._last_empty_log_slot is None or self._last_empty_log_slot > slot + ): + self._last_empty_log_slot = slot + recalculate = True + if recalculate: + self.recalculate_missing_log_addresses() + + # pylint: disable=too-many-arguments + def add_log( + self, + address: int, + slot: int, + timestamp: datetime, + pulses: int, + import_only: bool = False, + ) -> bool: + """Store pulse log.""" + _LOGGER.debug( + "add_log | address=%s | slot=%s | timestamp=%s 
| pulses=%s | import_only=%s", + address, + slot, + timestamp, + pulses, + import_only, + ) + direction = CONSUMED + if self._log_production and pulses < 0: + direction = PRODUCED + + log_record = PulseLogRecord(timestamp, pulses, direction) + if not self._add_log_record(address, slot, log_record): + if not self._log_exists(address, slot): + return False + if address != self._last_log_address and slot != self._last_log_slot: + return False + # self._update_log_direction(address, slot, timestamp) + self._update_log_references(address, slot) + self._update_log_interval() + self._update_rollover() + if not import_only: + self.recalculate_missing_log_addresses() + return True + + def recalculate_missing_log_addresses(self) -> None: + """Recalculate missing log addresses.""" + self._log_addresses_missing = self._logs_missing( + datetime.now(tz=UTC) - timedelta(hours=MAX_LOG_HOURS) + ) + + def _add_log_record( + self, address: int, slot: int, log_record: PulseLogRecord + ) -> bool: + """Add log record. + + Return False if log record already exists, or is not required because its timestamp is expired. + """ + if self._logs is None: + self._logs = {address: {slot: log_record}} + return True + if self._log_exists(address, slot): + return False + # Drop useless log records when we have at least 4 logs + if self.collected_logs > 4 and log_record.timestamp < ( + datetime.now(tz=UTC) - timedelta(hours=MAX_LOG_HOURS) + ): + return False + if self._logs.get(address) is None: + self._logs[address] = {slot: log_record} + self._logs[address][slot] = log_record + if ( + address == self._first_empty_log_address + and slot == self._first_empty_log_slot + ): + self._first_empty_log_address = None + self._first_empty_log_slot = None + if ( + address == self._last_empty_log_address + and slot == self._last_empty_log_slot + ): + self._last_empty_log_address = None + self._last_empty_log_slot = None + return True + + def _update_log_direction( + self, address: int, slot: int, timestamp: datetime + ) -> None: + """Update Energy direction of log record. + + Two subsequential logs with the same timestamp indicates the first + is consumption and second production. 
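+
+        For example, when two consecutive log slots carry an identical
+        timestamp, the earlier slot is treated as consumption and the later
+        one as production.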
+ """ + if self._logs is None: + return + + prev_address, prev_slot = calc_log_address(address, slot, -1) + if self._log_exists(prev_address, prev_slot): + if self._logs[prev_address][prev_slot].timestamp == timestamp: + # Given log is the second log with same timestamp, + # mark direction as production + self._logs[address][slot].is_consumption = False + self._logs[prev_address][prev_slot].is_consumption = True + self._log_production = True + elif self._log_production: + self._logs[address][slot].is_consumption = True + if self._logs[prev_address][prev_slot].is_consumption: + self._logs[prev_address][prev_slot].is_consumption = False + self._reset_log_references() + elif self._log_production is None: + self._log_production = False + + next_address, next_slot = calc_log_address(address, slot, 1) + if self._log_exists(next_address, next_slot): + if self._logs[next_address][next_slot].timestamp == timestamp: + # Given log is the first log with same timestamp, + # mark direction as production of next log + self._logs[address][slot].is_consumption = True + if self._logs[next_address][next_slot].is_consumption: + self._logs[next_address][next_slot].is_consumption = False + self._reset_log_references() + self._log_production = True + elif self._log_production: + self._logs[address][slot].is_consumption = False + self._logs[next_address][next_slot].is_consumption = True + elif self._log_production is None: + self._log_production = False + + def _update_log_interval(self) -> None: + """Update the detected log interval based on the most recent two logs.""" + if self._logs is None or self._log_production is None: + _LOGGER.debug( + "_update_log_interval | %s | _logs=%s, _log_production=%s", + self._mac, + self._logs, + self._log_production, + ) + return + + last_cons_address, last_cons_slot = self._last_log_reference( + is_consumption=True + ) + if last_cons_address is None or last_cons_slot is None: + return + + # Update interval of consumption + last_cons_timestamp = self._logs[last_cons_address][last_cons_slot].timestamp + address, slot = calc_log_address(last_cons_address, last_cons_slot, -1) + while self._log_exists(address, slot): + if self._logs[address][slot].is_consumption: + delta1: timedelta = ( + last_cons_timestamp - self._logs[address][slot].timestamp + ) + self._log_interval_consumption = int( + delta1.total_seconds() / MINUTE_IN_SECONDS + ) + break + + address, slot = calc_log_address(address, slot, -1) + + if ( + self._log_interval_consumption is not None + and self._last_log_consumption_timestamp is not None + ): + self._next_log_consumption_timestamp = ( + self._last_log_consumption_timestamp + + timedelta(minutes=self._log_interval_consumption) + ) + + if not self._log_production: + return + + # Update interval of production + last_prod_address, last_prod_slot = self._last_log_reference( + is_consumption=False + ) + if last_prod_address is None or last_prod_slot is None: + return + + last_prod_timestamp = self._logs[last_prod_address][last_prod_slot].timestamp + address, slot = calc_log_address(last_prod_address, last_prod_slot, -1) + while self._log_exists(address, slot): + if not self._logs[address][slot].is_consumption: + delta2: timedelta = ( + last_prod_timestamp - self._logs[address][slot].timestamp + ) + self._log_interval_production = int( + delta2.total_seconds() / MINUTE_IN_SECONDS + ) + break + + address, slot = calc_log_address(address, slot, -1) + + if ( + self._log_interval_production is not None + and self._last_log_production_timestamp is not None + ): + 
self._next_log_production_timestamp = ( + self._last_log_production_timestamp + + timedelta(minutes=self._log_interval_production) + ) + + def _log_exists(self, address: int, slot: int) -> bool: + if self._logs is None: + return False + if self._logs.get(address) is None: + return False + if self._logs[address].get(slot) is None: + return False + return True + + def _update_last_log_reference( + self, address: int, slot: int, timestamp: datetime, is_consumption: bool + ) -> None: + """Update references to last (most recent) log record.""" + if self._last_log_timestamp is None or self._last_log_timestamp < timestamp: + self._last_log_address = address + self._last_log_slot = slot + self._last_log_timestamp = timestamp + elif self._last_log_timestamp == timestamp and not is_consumption: + self._last_log_address = address + self._last_log_slot = slot + self._last_log_timestamp = timestamp + + def _update_last_consumption_log_reference( + self, address: int, slot: int, timestamp: datetime + ) -> None: + """Update references to last (most recent) log consumption record.""" + if ( + self._last_log_consumption_timestamp is None + or self._last_log_consumption_timestamp <= timestamp + ): + self._last_log_consumption_timestamp = timestamp + self._last_log_consumption_address = address + self._last_log_consumption_slot = slot + + def _update_last_production_log_reference( + self, address: int, slot: int, timestamp: datetime + ) -> None: + """Update references to last (most recent) log production record.""" + if ( + self._last_log_production_timestamp is None + or self._last_log_production_timestamp <= timestamp + ): + self._last_log_production_timestamp = timestamp + self._last_log_production_address = address + self._last_log_production_slot = slot + + def _reset_log_references(self) -> None: + """Reset log references.""" + self._last_log_consumption_address = None + self._last_log_consumption_slot = None + self._last_log_consumption_timestamp = None + self._first_log_consumption_address = None + self._first_log_consumption_slot = None + self._first_log_consumption_timestamp = None + self._last_log_production_address = None + self._last_log_production_slot = None + self._last_log_production_timestamp = None + self._first_log_production_address = None + self._first_log_production_slot = None + self._first_log_production_timestamp = None + if self._logs is None: + return + for address in self._logs: + for slot, log_record in self._logs[address].items(): + if log_record.is_consumption: + if self._last_log_consumption_timestamp is None: + self._last_log_consumption_timestamp = log_record.timestamp + if self._last_log_consumption_timestamp <= log_record.timestamp: + self._last_log_consumption_timestamp = log_record.timestamp + self._last_log_consumption_address = address + self._last_log_consumption_slot = slot + + if self._first_log_consumption_timestamp is None: + self._first_log_consumption_timestamp = log_record.timestamp + if self._first_log_consumption_timestamp >= log_record.timestamp: + self._first_log_consumption_timestamp = log_record.timestamp + self._first_log_consumption_address = address + self._first_log_consumption_slot = slot + else: + if self._last_log_production_timestamp is None: + self._last_log_production_timestamp = log_record.timestamp + if self._last_log_production_timestamp <= log_record.timestamp: + self._last_log_production_timestamp = log_record.timestamp + self._last_log_production_address = address + self._last_log_production_slot = slot + + if 
self._first_log_production_timestamp is None: + self._first_log_production_timestamp = log_record.timestamp + if self._first_log_production_timestamp > log_record.timestamp: + self._first_log_production_timestamp = log_record.timestamp + self._first_log_production_address = address + self._first_log_production_slot = slot + + def _update_first_log_reference( + self, address: int, slot: int, timestamp: datetime, is_consumption: bool + ) -> None: + """Update references to first (oldest) log record.""" + if self._first_log_timestamp is None or self._first_log_timestamp > timestamp: + self._first_log_address = address + self._first_log_slot = slot + self._first_log_timestamp = timestamp + elif self._first_log_timestamp == timestamp and is_consumption: + self._first_log_address = address + self._first_log_slot = slot + self._first_log_timestamp = timestamp + + def _update_first_consumption_log_reference( + self, address: int, slot: int, timestamp: datetime + ) -> None: + """Update references to first (oldest) log consumption record.""" + if ( + self._first_log_consumption_timestamp is None + or self._first_log_consumption_timestamp >= timestamp + ): + self._first_log_consumption_timestamp = timestamp + self._first_log_consumption_address = address + self._first_log_consumption_slot = slot + + def _update_first_production_log_reference( + self, address: int, slot: int, timestamp: datetime + ) -> None: + """Update references to first (oldest) log production record.""" + if ( + self._first_log_production_timestamp is None + or self._first_log_production_timestamp >= timestamp + ): + self._first_log_production_timestamp = timestamp + self._first_log_production_address = address + self._first_log_production_slot = slot + + def _update_log_references(self, address: int, slot: int) -> None: + """Update next expected log timestamps.""" + if self._logs is None: + return + log_time_stamp = self._logs[address][slot].timestamp + is_consumption = self._logs[address][slot].is_consumption + + # Update log references + self._update_first_log_reference(address, slot, log_time_stamp, is_consumption) + self._update_last_log_reference(address, slot, log_time_stamp, is_consumption) + + if is_consumption: + self._update_first_consumption_log_reference(address, slot, log_time_stamp) + self._update_last_consumption_log_reference(address, slot, log_time_stamp) + elif self._log_production: + self._update_first_production_log_reference(address, slot, log_time_stamp) + self._update_last_production_log_reference(address, slot, log_time_stamp) + + @property + def log_addresses_missing(self) -> list[int] | None: + """Return the addresses of missing logs.""" + return self._log_addresses_missing + + def _last_log_reference( + self, is_consumption: bool | None = None + ) -> tuple[int | None, int | None]: + """Address and slot of last log.""" + if is_consumption is None: + return (self._last_log_address, self._last_log_slot) + + if is_consumption: + return (self._last_log_consumption_address, self._last_log_consumption_slot) + + return (self._last_log_production_address, self._last_log_production_slot) + + def _first_log_reference( + self, is_consumption: bool | None = None + ) -> tuple[int | None, int | None]: + """Address and slot of first log.""" + if is_consumption is None: + return (self._first_log_address, self._first_log_slot) + + if is_consumption: + return ( + self._first_log_consumption_address, + self._first_log_consumption_slot, + ) + + return (self._first_log_production_address, self._first_log_production_slot) + + 
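The `_logs_missing` helper below starts at the most recent log reference and walks backwards one (address, slot) step at a time until it reaches the oldest known log, collecting every address for which a slot has no cached record. A minimal standalone sketch of that walk, with illustrative names (`missing_addresses`, `_toy_calc_log_address`) and the assumption that `calc_log_address(address, slot, -1)` steps one slot back and rolls over to the previous address; the timestamp cut-off and interval handling of the real method are omitted:

```python
from collections.abc import Callable


def missing_addresses(
    logs: dict[int, dict[int, object]],
    last: tuple[int, int],
    first: tuple[int, int],
    calc_log_address: Callable[[int, int, int], tuple[int, int]],
) -> list[int]:
    """Collect addresses with missing slots between the last and first known log."""
    missing: list[int] = []
    address, slot = last
    while (address, slot) != first:
        address, slot = calc_log_address(address, slot, -1)
        if slot not in logs.get(address, {}) and address not in missing:
            missing.append(address)
    return missing


# Toy layout for illustration only: 4 slots (0-3) per address.
def _toy_calc_log_address(address: int, slot: int, offset: int) -> tuple[int, int]:
    return divmod(address * 4 + slot + offset, 4)


logs = {10: {0: "log", 1: "log"}, 12: {2: "log"}}
print(missing_addresses(logs, (12, 2), (10, 0), _toy_calc_log_address))
# -> [12, 11, 10]: any address with at least one missing slot in the range
```

The production code additionally stops the walk once it reaches a log older than the requested `from_timestamp`, and, when the range itself is complete, uses the detected consumption/production interval to estimate further missing addresses before the first collected log.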
def _logs_missing(self, from_timestamp: datetime) -> list[int] | None: + """Calculate list of missing log addresses.""" + if self._logs is None: + self._log_addresses_missing = None + return None + + if self.collected_logs < 2: + return None + + last_address, last_slot = self._last_log_reference() + if last_address is None or last_slot is None: + _LOGGER.debug( + "_logs_missing | %s | last_address=%s, last_slot=%s", + self._mac, + last_address, + last_slot, + ) + return None + + first_address, first_slot = self._first_log_reference() + if first_address is None or first_slot is None: + _LOGGER.debug( + "_logs_missing | %s | first_address=%s, first_slot=%s", + self._mac, + first_address, + first_slot, + ) + return None + + missing = [] + _LOGGER.debug( + "_logs_missing | %s | first_address=%s, last_address=%s", + self._mac, + first_address, + last_address, + ) + + if ( + last_address == first_address + and last_slot == first_slot + and self._logs[first_address][first_slot].timestamp + == self._logs[last_address][last_slot].timestamp + ): + # Power consumption logging, so we need at least 4 logs. + return None + + # Collect any missing address in current range + address = last_address + slot = last_slot + while not (address == first_address and slot == first_slot): + address, slot = calc_log_address(address, slot, -1) + if address in missing: + continue + if not self._log_exists(address, slot): + missing.append(address) + continue + if self._logs[address][slot].timestamp <= from_timestamp: + break + + # return missing logs in range first + if len(missing) > 0: + _LOGGER.debug( + "_logs_missing | %s | missing in range=%s", self._mac, missing + ) + return missing + + if first_address not in self._logs: + return missing + + if first_slot not in self._logs[first_address]: + return missing + + if self._logs[first_address][first_slot].timestamp < from_timestamp: + return missing + + # Check if we are able to calculate log interval + address, slot = calc_log_address(first_address, first_slot, -1) + log_interval: int | None = None + if self._log_interval_consumption is not None: + log_interval = self._log_interval_consumption + elif self._log_interval_production is not None: + log_interval = self._log_interval_production + if ( + self._log_interval_production is not None + and log_interval is not None + and self._log_interval_production < log_interval + ): + log_interval = self._log_interval_production + if log_interval is None: + return None + + # We have an suspected interval, so try to calculate missing log addresses prior to first collected log + calculated_timestamp = self._logs[first_address][ + first_slot + ].timestamp - timedelta(minutes=log_interval) + while from_timestamp < calculated_timestamp: + if ( + address == self._first_empty_log_address + and slot == self._first_empty_log_slot + ): + break + if address not in missing: + missing.append(address) + calculated_timestamp -= timedelta(minutes=log_interval) + address, slot = calc_log_address(address, slot, -1) + + missing.sort(reverse=True) + _LOGGER.debug("_logs_missing | %s | calculated missing=%s", self._mac, missing) + return missing + + def _last_known_duration(self) -> timedelta: + """Duration for last known logs.""" + if self._logs is None: + raise EnergyError("Unable to return last known duration without any logs") + if len(self._logs) < 2: + return timedelta(hours=1) + address, slot = self._last_log_reference() + if address is None or slot is None: + raise EnergyError("Unable to return last known duration without any logs") + 
last_known_timestamp = self._logs[address][slot].timestamp + address, slot = calc_log_address(address, slot, -1) + while ( + self._log_exists(address, slot) + or self._logs[address][slot].timestamp == last_known_timestamp + ): + address, slot = calc_log_address(address, slot, -1) + return self._logs[address][slot].timestamp - last_known_timestamp + + def _missing_addresses_before( + self, address: int, slot: int, target: datetime + ) -> list[int]: + """Return list of missing address(es) prior to given log timestamp.""" + addresses: list[int] = [] + if self._logs is None or target >= self._logs[address][slot].timestamp: + return addresses + + # default interval + calc_interval_cons = timedelta(hours=1) + if ( + self._log_interval_consumption is not None + and self._log_interval_consumption > 0 + ): + # Use consumption interval + calc_interval_cons = timedelta(minutes=self._log_interval_consumption) + if self._log_interval_consumption == 0: + pass + + if self._log_production is False: + expected_timestamp = ( + self._logs[address][slot].timestamp - calc_interval_cons + ) + address, slot = calc_log_address(address, slot, -1) + while expected_timestamp > target and address > 0: + if address not in addresses: + addresses.append(address) + expected_timestamp -= calc_interval_cons + address, slot = calc_log_address(address, slot, -1) + else: + # Production logging active + calc_interval_prod = timedelta(hours=1) + if ( + self._log_interval_production is not None + and self._log_interval_production > 0 + ): + calc_interval_prod = timedelta(minutes=self._log_interval_production) + + expected_timestamp_cons = ( + self._logs[address][slot].timestamp - calc_interval_cons + ) + expected_timestamp_prod = ( + self._logs[address][slot].timestamp - calc_interval_prod + ) + + address, slot = calc_log_address(address, slot, -1) + while ( + expected_timestamp_cons > target or expected_timestamp_prod > target + ) and address > 0: + if address not in addresses: + addresses.append(address) + if expected_timestamp_prod > expected_timestamp_cons: + expected_timestamp_prod -= calc_interval_prod + else: + expected_timestamp_cons -= calc_interval_cons + address, slot = calc_log_address(address, slot, -1) + + return addresses + + def _missing_addresses_after( + self, address: int, slot: int, target: datetime + ) -> list[int]: + """Return list of any missing address(es) after given log timestamp.""" + addresses: list[int] = [] + + if self._logs is None: + return addresses + + # default interval + calc_interval_cons = timedelta(hours=1) + if ( + self._log_interval_consumption is not None + and self._log_interval_consumption > 0 + ): + # Use consumption interval + calc_interval_cons = timedelta(minutes=self._log_interval_consumption) + + if self._log_production is False: + expected_timestamp = ( + self._logs[address][slot].timestamp + calc_interval_cons + ) + address, slot = calc_log_address(address, slot, 1) + while expected_timestamp < target: + address, slot = calc_log_address(address, slot, 1) + expected_timestamp += timedelta(hours=1) + if address not in addresses: + addresses.append(address) + return addresses + + # Production logging active + calc_interval_prod = timedelta(hours=1) + if ( + self._log_interval_production is not None + and self._log_interval_production > 0 + ): + calc_interval_prod = timedelta(minutes=self._log_interval_production) + + expected_timestamp_cons = ( + self._logs[address][slot].timestamp + calc_interval_cons + ) + expected_timestamp_prod = ( + self._logs[address][slot].timestamp + 
calc_interval_prod + ) + address, slot = calc_log_address(address, slot, 1) + while expected_timestamp_cons < target or expected_timestamp_prod < target: + if address not in addresses: + addresses.append(address) + if expected_timestamp_prod < expected_timestamp_cons: + expected_timestamp_prod += calc_interval_prod + else: + expected_timestamp_cons += calc_interval_cons + address, slot = calc_log_address(address, slot, 1) + return addresses diff --git a/plugwise_usb/nodes/helpers/py.typed b/plugwise_usb/nodes/helpers/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/plugwise_usb/nodes/helpers/subscription.py b/plugwise_usb/nodes/helpers/subscription.py new file mode 100644 index 000000000..a3b2c0554 --- /dev/null +++ b/plugwise_usb/nodes/helpers/subscription.py @@ -0,0 +1,64 @@ +"""Base class for plugwise node publisher.""" + +from __future__ import annotations + +from asyncio import gather +from collections.abc import Callable, Coroutine +from dataclasses import dataclass +from typing import Any + + +from ...api import NodeFeature + + +@dataclass +class NodeFeatureSubscription: + """Subscription registration details for node feature.""" + + callback_fn: Callable[[NodeFeature, Any], Coroutine[Any, Any, None]] + features: tuple[NodeFeature, ...] + + +class FeaturePublisher: + """Base Class to call awaitable of subscription when event happens.""" + def __init__(self) -> None: + self._feature_update_subscribers: dict[ + Callable[[], None], + NodeFeatureSubscription, + ] = {} + + + def subscribe_to_feature_update( + self, + node_feature_callback: Callable[[NodeFeature, Any], Coroutine[Any, Any, None]], + features: tuple[NodeFeature, ...], + ) -> Callable[[], None]: + """Subscribe callback when specified NodeFeature state updates. + + Returns the function to be called to unsubscribe later. 
+ """ + + def remove_subscription() -> None: + """Remove stick feature subscription.""" + self._feature_update_subscribers.pop(remove_subscription) + + self._feature_update_subscribers[remove_subscription] = NodeFeatureSubscription( + node_feature_callback, + features, + ) + return remove_subscription + + async def publish_feature_update_to_subscribers( + self, + feature: NodeFeature, + state: Any, + ) -> None: + """Publish feature to applicable subscribers.""" + callback_list: list[Coroutine[Any, Any, None]] = [] + for node_feature_subscription in list( + self._feature_update_subscribers.values() + ): + if feature in node_feature_subscription.features: + callback_list.append(node_feature_subscription.callback_fn(feature, state)) + if len(callback_list) > 0: + await gather(*callback_list) diff --git a/plugwise_usb/nodes/node.py b/plugwise_usb/nodes/node.py new file mode 100644 index 000000000..1f937b99c --- /dev/null +++ b/plugwise_usb/nodes/node.py @@ -0,0 +1,774 @@ +"""Base class of Plugwise node device.""" + +from __future__ import annotations + +from abc import ABC +from asyncio import Task, create_task +from collections.abc import Awaitable, Callable +from datetime import UTC, datetime, timedelta +import logging +from typing import Any + +from ..api import ( + AvailableState, + BatteryConfig, + EnergyStatistics, + MotionConfig, + MotionSensitivity, + MotionState, + NetworkStatistics, + NodeEvent, + NodeFeature, + NodeInfo, + NodeType, + PowerStatistics, + RelayConfig, + RelayState, +) +from ..connection import StickController +from ..constants import SUPPRESS_INITIALIZATION_WARNINGS, UTF8 +from ..exceptions import FeatureError, NodeError +from ..helpers.util import version_to_model +from ..messages.requests import NodeInfoRequest, NodePingRequest +from ..messages.responses import NodeInfoResponse, NodePingResponse +from .helpers import raise_not_loaded +from .helpers.cache import NodeCache +from .helpers.firmware import FEATURE_SUPPORTED_AT_FIRMWARE, SupportedVersions +from .helpers.subscription import FeaturePublisher + +_LOGGER = logging.getLogger(__name__) + + +NODE_FEATURES = ( + NodeFeature.AVAILABLE, + NodeFeature.INFO, + NodeFeature.PING, +) + + +CACHE_FIRMWARE = "firmware" +CACHE_NODE_TYPE = "node_type" +CACHE_HARDWARE = "hardware" +CACHE_NODE_INFO_TIMESTAMP = "node_info_timestamp" + + +class PlugwiseBaseNode(FeaturePublisher, ABC): + """Abstract Base Class for a Plugwise node.""" + + def __init__( + self, + mac: str, + address: int, + controller: StickController, + loaded_callback: Callable[[NodeEvent, str], Awaitable[None]], + ): + """Initialize Plugwise base node class.""" + super().__init__() + self._loaded_callback = loaded_callback + self._message_subscribe = controller.subscribe_to_messages + self._features: tuple[NodeFeature, ...] 
= NODE_FEATURES + self._last_seen = datetime.now(tz=UTC) + self._node_info = NodeInfo(mac, address) + self._ping = NetworkStatistics() + self._mac_in_bytes = bytes(mac, encoding=UTF8) + self._mac_in_str = mac + self._send = controller.send + self._cache_enabled: bool = False + self._cache_folder_create: bool = False + self._cache_save_task: Task[None] | None = None + self._node_cache = NodeCache(mac, "") + # Sensors + self._available: bool = False + self._connected: bool = False + self._initialized: bool = False + self._initialization_delay_expired: datetime | None = None + self._loaded: bool = False + self._node_protocols: SupportedVersions | None = None + + # Node info + self._current_log_address: int | None = None + + # region Properties + + @property + def available(self) -> bool: + """Return network availability state.""" + return self._available + + @property + def available_state(self) -> AvailableState: + """Network availability state.""" + return AvailableState( + self._available, + self._last_seen, + ) + + @property + @raise_not_loaded + def battery_config(self) -> BatteryConfig: + """Battery related configuration settings.""" + if NodeFeature.BATTERY not in self._features: + raise FeatureError( + f"Battery configuration property is not supported for node {self.mac}" + ) + raise NotImplementedError() + + @property + @raise_not_loaded + def clock_sync(self) -> bool: + """Indicate if the internal clock must be synced.""" + if NodeFeature.BATTERY not in self._features: + raise FeatureError( + f"Clock sync property is not supported for node {self.mac}" + ) + raise NotImplementedError() + + @property + def cache_folder(self) -> str: + """Return path to cache folder.""" + return self._node_cache.cache_root_directory + + @cache_folder.setter + def cache_folder(self, cache_folder: str) -> None: + """Set path to cache folder.""" + self._node_cache.cache_root_directory = cache_folder + + @property + def cache_folder_create(self) -> bool: + """Return if cache folder must be create when it does not exists.""" + return self._cache_folder_create + + @cache_folder_create.setter + def cache_folder_create(self, enable: bool = True) -> None: + """Enable or disable creation of cache folder.""" + self._cache_folder_create = enable + + @property + def cache_enabled(self) -> bool: + """Return usage of cache.""" + return self._cache_enabled + + @cache_enabled.setter + def cache_enabled(self, enable: bool) -> None: + """Enable or disable usage of cache.""" + self._cache_enabled = enable + + @property + @raise_not_loaded + def energy(self) -> EnergyStatistics: + """Energy statistics.""" + if NodeFeature.POWER not in self._features: + raise FeatureError(f"Energy state is not supported for node {self.mac}") + raise NotImplementedError() + + @property + @raise_not_loaded + def energy_consumption_interval(self) -> int | None: + """Interval (minutes) energy consumption counters are locally logged at Circle devices.""" + if NodeFeature.ENERGY not in self._features: + raise FeatureError( + f"Energy log interval is not supported for node {self.mac}" + ) + raise NotImplementedError() + + @property + @raise_not_loaded + def energy_production_interval(self) -> int | None: + """Interval (minutes) energy production counters are locally logged at Circle devices.""" + if NodeFeature.ENERGY not in self._features: + raise FeatureError( + f"Energy log interval is not supported for node {self.mac}" + ) + raise NotImplementedError() + + @property + def features(self) -> tuple[NodeFeature, ...]: + """Supported feature types 
of node.""" + return self._features + + @property + @raise_not_loaded + def humidity(self) -> float: + """Humidity state.""" + if NodeFeature.HUMIDITY not in self._features: + raise FeatureError(f"Humidity state is not supported for node {self.mac}") + raise NotImplementedError() + + @property + def is_battery_powered(self) -> bool: + """Return if node is battery powered.""" + return self._node_info.is_battery_powered + + @property + def is_loaded(self) -> bool: + """Return load status.""" + return self._loaded + + @property + def last_seen(self) -> datetime: + """Timestamp of last network activity.""" + return self._last_seen + + @property + def name(self) -> str: + """Return name of node.""" + if self._node_info.name is not None: + return self._node_info.name + return self._mac_in_str + + @property + def network_address(self) -> int: + """Zigbee network registration address.""" + return self._node_info.zigbee_address + + @property + def node_info(self) -> NodeInfo: + """Node information.""" + return self._node_info + + @property + def mac(self) -> str: + """Zigbee mac address of node.""" + return self._mac_in_str + + @property + @raise_not_loaded + def motion(self) -> bool: + """Motion detection value.""" + if NodeFeature.MOTION not in self._features: + raise FeatureError(f"Motion state is not supported for node {self.mac}") + raise NotImplementedError() + + @property + @raise_not_loaded + def motion_config(self) -> MotionConfig: + """Motion configuration settings.""" + if NodeFeature.MOTION not in self._features: + raise FeatureError( + f"Motion configuration is not supported for node {self.mac}" + ) + raise NotImplementedError() + + @property + @raise_not_loaded + def motion_state(self) -> MotionState: + """Motion detection state.""" + if NodeFeature.MOTION not in self._features: + raise FeatureError(f"Motion state is not supported for node {self.mac}") + raise NotImplementedError() + + @property + def ping_stats(self) -> NetworkStatistics: + """Ping statistics.""" + return self._ping + + @property + @raise_not_loaded + def power(self) -> PowerStatistics: + """Power statistics.""" + if NodeFeature.POWER not in self._features: + raise FeatureError(f"Power state is not supported for node {self.mac}") + raise NotImplementedError() + + @property + @raise_not_loaded + def relay_state(self) -> RelayState: + """State of relay.""" + if NodeFeature.RELAY not in self._features: + raise FeatureError(f"Relay state is not supported for node {self.mac}") + raise NotImplementedError() + + @property + @raise_not_loaded + def relay(self) -> bool: + """Relay value.""" + if NodeFeature.RELAY not in self._features: + raise FeatureError(f"Relay value is not supported for node {self.mac}") + raise NotImplementedError() + + @property + @raise_not_loaded + def relay_config(self) -> RelayConfig: + """Relay configuration.""" + if NodeFeature.RELAY_INIT not in self._features: + raise FeatureError( + f"Relay configuration is not supported for node {self.mac}" + ) + raise NotImplementedError() + + @property + @raise_not_loaded + def switch(self) -> bool: + """Switch button value.""" + if NodeFeature.SWITCH not in self._features: + raise FeatureError(f"Switch value is not supported for node {self.mac}") + raise NotImplementedError() + + @property + @raise_not_loaded + def temperature(self) -> float: + """Temperature value.""" + if NodeFeature.TEMPERATURE not in self._features: + raise FeatureError( + f"Temperature state is not supported for node {self.mac}" + ) + raise NotImplementedError() + + # endregion + + def 
_setup_protocol( + self, + firmware: dict[datetime, SupportedVersions], + node_features: tuple[NodeFeature, ...], + ) -> None: + """Determine protocol version based on firmware version and enable supported additional supported features.""" + if self._node_info.firmware is None: + return + self._node_protocols = firmware.get(self._node_info.firmware, None) + if self._node_protocols is None: + _LOGGER.warning( + "Failed to determine the protocol version for node %s (%s) based on firmware version %s of list %s", + self._node_info.mac, + self.__class__.__name__, + self._node_info.firmware, + str(firmware.keys()), + ) + return + for feature in node_features: + if ( + required_version := FEATURE_SUPPORTED_AT_FIRMWARE.get(feature) + ) is not None: + if ( + self._node_protocols.min + <= required_version + <= self._node_protocols.max + and feature not in self._features + ): + self._features += (feature,) + self._node_info.features = self._features + + async def reconnect(self) -> None: + """Reconnect node to Plugwise Zigbee network.""" + if await self.ping_update() is not None: + self._connected = True + await self._available_update_state(True, None) + + async def disconnect(self) -> None: + """Disconnect node from Plugwise Zigbee network.""" + self._connected = False + await self._available_update_state(False) + + async def scan_calibrate_light(self) -> bool: + """Request to calibration light sensitivity of Scan device. Returns True if successful.""" + raise NotImplementedError() + + async def load(self) -> bool: + """Load configuration and activate node features.""" + raise NotImplementedError() + + async def _load_cache_file(self) -> bool: + """Load states from previous cached information.""" + if self._loaded: + return True + if not self._cache_enabled: + _LOGGER.warning( + "Unable to load node %s from cache because caching is disabled", + self.mac, + ) + return False + if not self._node_cache.initialized: + await self._node_cache.initialize_cache(self._cache_folder_create) + return await self._node_cache.restore_cache() + + async def clear_cache(self) -> None: + """Clear current cache.""" + if self._node_cache is not None: + await self._node_cache.clear_cache() + + async def _load_from_cache(self) -> bool: + """Load states from previous cached information. 
Return True if successful.""" + if self._loaded: + return True + if not await self._load_cache_file(): + _LOGGER.debug("Node %s failed to load cache file", self.mac) + return False + # Node Info + if not await self._node_info_load_from_cache(): + _LOGGER.debug("Node %s failed to load node_info from cache", self.mac) + return False + return True + + async def initialize(self) -> bool: + """Initialize node configuration.""" + if self._initialized: + return True + self._initialization_delay_expired = datetime.now(tz=UTC) + timedelta( + minutes=SUPPRESS_INITIALIZATION_WARNINGS + ) + self._initialized = True + return True + + async def _available_update_state( + self, available: bool, timestamp: datetime | None = None + ) -> None: + """Update the node availability state.""" + if self._available == available: + if ( + self._last_seen is not None + and timestamp is not None + and (timestamp - self._last_seen).seconds > 5 + + ): + self._last_seen = timestamp + await self.publish_feature_update_to_subscribers( + NodeFeature.AVAILABLE, self.available_state + ) + return + if timestamp is not None: + self._last_seen = timestamp + if available: + _LOGGER.info("Device %s detected to be available (on-line)", self.name) + self._available = True + await self.publish_feature_update_to_subscribers( + NodeFeature.AVAILABLE, self.available_state + ) + return + _LOGGER.info("Device %s detected to be not available (off-line)", self.name) + self._available = False + await self.publish_feature_update_to_subscribers( + NodeFeature.AVAILABLE, self.available_state + ) + + async def node_info_update( + self, node_info: NodeInfoResponse | None = None + ) -> NodeInfo | None: + """Update Node hardware information.""" + if node_info is None: + request = NodeInfoRequest(self._send, self._mac_in_bytes) + node_info = await request.send() + if node_info is None: + _LOGGER.debug("No response for node_info_update() for %s", self.mac) + await self._available_update_state(False) + return self._node_info + await self._available_update_state(True, node_info.timestamp) + await self.update_node_details( + firmware=node_info.firmware, + node_type=node_info.node_type, + hardware=node_info.hardware, + timestamp=node_info.timestamp, + relay_state=node_info.relay_state, + logaddress_pointer=node_info.current_logaddress_pointer, + ) + return self._node_info + + async def _node_info_load_from_cache(self) -> bool: + """Load node info settings from cache.""" + firmware = self._get_cache_as_datetime(CACHE_FIRMWARE) + hardware = self._get_cache(CACHE_HARDWARE) + timestamp = self._get_cache_as_datetime(CACHE_NODE_INFO_TIMESTAMP) + node_type: NodeType | None = None + if (node_type_str := self._get_cache(CACHE_NODE_TYPE)) is not None: + node_type = NodeType(int(node_type_str)) + return await self.update_node_details( + firmware=firmware, + hardware=hardware, + node_type=node_type, + timestamp=timestamp, + relay_state=None, + logaddress_pointer=None, + ) + + # pylint: disable=too-many-arguments + async def update_node_details( + self, + firmware: datetime | None, + hardware: str | None, + node_type: NodeType | None, + timestamp: datetime | None, + relay_state: bool | None, + logaddress_pointer: int | None, + ) -> bool: + """Process new node info and return true if all fields are updated.""" + complete = True + if firmware is None: + complete = False + else: + self._node_info.firmware = firmware + self._set_cache(CACHE_FIRMWARE, firmware) + if hardware is None: + complete = False + else: + if self._node_info.version != hardware: + # Generate 
modelname based on hardware version + hardware, model_info = version_to_model(hardware) + model_info = model_info.split(" ") + self._node_info.model = model_info[0] + # Handle + devices + if len(model_info) > 1 and "+" in model_info[1]: + self._node_info.model = model_info[0] + " " + model_info[1] + model_info[0] = self._node_info.model + model_info.pop(1) + + self._node_info.version = hardware + if self._node_info.model == "Unknown": + _LOGGER.warning( + "Failed to detect hardware model for %s based on '%s'", + self.mac, + hardware, + ) + self._node_info.model_type = None + if len(model_info) > 1: + self._node_info.model_type = " ".join(model_info[1:]) + if self._node_info.model is not None: + self._node_info.name = f"{model_info[0]} {self._node_info.mac[-5:]}" + self._set_cache(CACHE_HARDWARE, hardware) + if timestamp is None: + complete = False + else: + self._node_info.timestamp = timestamp + self._set_cache(CACHE_NODE_INFO_TIMESTAMP, timestamp) + if node_type is None: + complete = False + else: + self._node_info.node_type = NodeType(node_type) + self._set_cache(CACHE_NODE_TYPE, self._node_info.node_type.value) + await self.save_cache() + if timestamp is not None and timestamp > datetime.now(tz=UTC) - timedelta( + minutes=5 + ): + await self._available_update_state(True, timestamp) + return complete + + async def is_online(self) -> bool: + """Check if node is currently online.""" + if await self.ping_update() is None: + _LOGGER.debug("No response to ping for %s", self.mac) + return False + return True + + async def ping_update( + self, ping_response: NodePingResponse | None = None, retries: int = 1 + ) -> NetworkStatistics | None: + """Update ping statistics.""" + if ping_response is None: + request = NodePingRequest(self._send, self._mac_in_bytes, retries) + ping_response = await request.send() + if ping_response is None: + await self._available_update_state(False) + return None + await self._available_update_state(True, ping_response.timestamp) + self.update_ping_stats( + ping_response.timestamp, + ping_response.rssi_in, + ping_response.rssi_out, + ping_response.rtt, + ) + await self.publish_feature_update_to_subscribers(NodeFeature.PING, self._ping) + return self._ping + + def update_ping_stats( + self, timestamp: datetime, rssi_in: int, rssi_out: int, rtt: int + ) -> None: + """Update ping statistics.""" + self._ping.timestamp = timestamp + self._ping.rssi_in = rssi_in + self._ping.rssi_out = rssi_out + self._ping.rtt = rtt + self._available = True + + @raise_not_loaded + async def get_state(self, features: tuple[NodeFeature]) -> dict[NodeFeature, Any]: + """Update latest state for given feature.""" + states: dict[NodeFeature, Any] = {} + for feature in features: + if feature not in self._features: + raise NodeError( + f"Update of feature '{feature.name}' is " + + f"not supported for {self.mac}" + ) + if feature == NodeFeature.INFO: + states[NodeFeature.INFO] = await self.node_info_update() + elif feature == NodeFeature.AVAILABLE: + states[NodeFeature.AVAILABLE] = self.available_state + elif feature == NodeFeature.PING: + states[NodeFeature.PING] = await self.ping_update() + else: + raise NodeError( + f"Update of feature '{feature.name}' is " + + f"not supported for {self.mac}" + ) + return states + + async def unload(self) -> None: + """Deactivate and unload node features.""" + if not self._cache_enabled: + return + if self._cache_save_task is not None and not self._cache_save_task.done(): + await self._cache_save_task + await self.save_cache(trigger_only=False, full_write=True) + 
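The cache helpers that follow persist values as plain strings; datetimes are flattened to a dash-separated "year-month-day-hour-minute-second" form by `_set_cache` and parsed back by `_get_cache_as_datetime`. A small round-trip sketch of that convention, using illustrative helper names (the real code goes through `NodeCache`):

```python
from datetime import UTC, datetime


def encode_cache_datetime(value: datetime) -> str:
    """Flatten a datetime the same way _set_cache does (no zero padding)."""
    return (
        f"{value.year}-{value.month}-{value.day}"
        f"-{value.hour}-{value.minute}-{value.second}"
    )


def decode_cache_datetime(raw: str) -> datetime | None:
    """Parse the dash-separated form back, as _get_cache_as_datetime does."""
    parts = raw.split("-")
    if len(parts) != 6:
        return None
    year, month, day, hour, minute, second = (int(part) for part in parts)
    return datetime(year, month, day, hour, minute, second, tzinfo=UTC)


# Round trip: microseconds are dropped, everything else survives.
now = datetime.now(tz=UTC).replace(microsecond=0)
assert decode_cache_datetime(encode_cache_datetime(now)) == now
```

Note the format is not ISO 8601: the parser only accepts the fixed six-field split and returns None for anything else, so values written in another format are silently ignored.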
+ def _get_cache(self, setting: str) -> str | None: + """Retrieve value of specified setting from cache memory.""" + if not self._cache_enabled: + return None + return self._node_cache.get_state(setting) + + def _get_cache_as_datetime(self, setting: str) -> datetime | None: + """Retrieve value of specified setting from cache memory and return it as datetime object.""" + if (timestamp_str := self._get_cache(setting)) is not None: + data = timestamp_str.split("-") + if len(data) == 6: + return datetime( + year=int(data[0]), + month=int(data[1]), + day=int(data[2]), + hour=int(data[3]), + minute=int(data[4]), + second=int(data[5]), + tzinfo=UTC, + ) + return None + + def _set_cache(self, setting: str, value: Any) -> None: + """Store setting with value in cache memory.""" + if not self._cache_enabled: + return + if isinstance(value, datetime): + self._node_cache.update_state( + setting, + f"{value.year}-{value.month}-{value.day}-{value.hour}" + + f"-{value.minute}-{value.second}", + ) + elif isinstance(value, str): + self._node_cache.update_state(setting, value) + else: + self._node_cache.update_state(setting, str(value)) + + async def save_cache( + self, trigger_only: bool = True, full_write: bool = False + ) -> None: + """Save cached data to cache file when cache is enabled.""" + if not self._cache_enabled or not self._loaded or not self._initialized: + return + _LOGGER.debug("Save cache file for node %s", self.mac) + if self._cache_save_task is not None and not self._cache_save_task.done(): + await self._cache_save_task + if trigger_only: + self._cache_save_task = create_task(self._node_cache.save_cache()) + else: + await self._node_cache.save_cache(rewrite=full_write) + + @staticmethod + def skip_update(data_class: Any, seconds: int) -> bool: + """Check if update can be skipped when timestamp of given dataclass is less than given seconds old.""" + if data_class is None: + return False + if not hasattr(data_class, "timestamp"): + return False + if data_class.timestamp is None: + return False + if data_class.timestamp + timedelta(seconds=seconds) > datetime.now(tz=UTC): + return True + return False + + # region Configuration of properties + @raise_not_loaded + async def set_awake_duration(self, seconds: int) -> bool: + """Change the awake duration.""" + if NodeFeature.BATTERY not in self._features: + raise FeatureError( + f"Changing awake duration is not supported for node {self.mac}" + ) + raise NotImplementedError() + + @raise_not_loaded + async def set_clock_interval(self, minutes: int) -> bool: + """Change the clock interval.""" + if NodeFeature.BATTERY not in self._features: + raise FeatureError( + f"Changing clock interval is not supported for node {self.mac}" + ) + raise NotImplementedError() + + @raise_not_loaded + async def set_clock_sync(self, sync: bool) -> bool: + """Change the clock synchronization setting.""" + if NodeFeature.BATTERY not in self._features: + raise FeatureError( + f"Configuration of clock sync is not supported for node {self.mac}" + ) + raise NotImplementedError() + + @raise_not_loaded + async def set_maintenance_interval(self, minutes: int) -> bool: + """Change the maintenance interval.""" + if NodeFeature.BATTERY not in self._features: + raise FeatureError( + f"Changing maintenance interval is not supported for node {self.mac}" + ) + raise NotImplementedError() + + @raise_not_loaded + async def set_motion_daylight_mode(self, state: bool) -> bool: + """Configure if motion must be detected when light level is below threshold.""" + if NodeFeature.MOTION not in 
self._features: + raise FeatureError( + f"Configuration of daylight mode is not supported for node {self.mac}" + ) + raise NotImplementedError() + + @raise_not_loaded + async def set_motion_reset_timer(self, minutes: int) -> bool: + """Configure the motion reset timer in minutes.""" + if NodeFeature.MOTION not in self._features: + raise FeatureError( + f"Changing motion reset timer is not supported for node {self.mac}" + ) + raise NotImplementedError() + + @raise_not_loaded + async def set_motion_sensitivity_level(self, level: MotionSensitivity) -> bool: + """Configure motion sensitivity level.""" + if NodeFeature.MOTION not in self._features: + raise FeatureError( + f"Configuration of motion sensitivity is not supported for node {self.mac}" + ) + raise NotImplementedError() + + @raise_not_loaded + async def set_relay(self, state: bool) -> bool: + """Change the state of the relay.""" + if NodeFeature.RELAY not in self._features: + raise FeatureError( + f"Changing state of relay is not supported for node {self.mac}" + ) + raise NotImplementedError() + + @raise_not_loaded + async def set_relay_init(self, state: bool) -> bool: + """Change the initial power-on state of the relay.""" + if NodeFeature.RELAY_INIT not in self._features: + raise FeatureError( + f"Configuration of initial power-up relay state is not supported for node {self.mac}" + ) + raise NotImplementedError() + + @raise_not_loaded + async def set_sleep_duration(self, minutes: int) -> bool: + """Change the sleep duration.""" + if NodeFeature.BATTERY not in self._features: + raise FeatureError( + f"Configuration of sleep duration is not supported for node {self.mac}" + ) + raise NotImplementedError() + + # endregion + + async def message_for_node(self, message: Any) -> None: + """Process message for node.""" + if isinstance(message, NodePingResponse): + await self.ping_update(message) + elif isinstance(message, NodeInfoResponse): + await self.node_info_update(message) + else: + raise NotImplementedError() diff --git a/plugwise_usb/nodes/py.typed b/plugwise_usb/nodes/py.typed new file mode 100644 index 000000000..e69de29bb diff --git a/plugwise_usb/nodes/scan.py b/plugwise_usb/nodes/scan.py index b9d2cdc3d..4137d66aa 100644 --- a/plugwise_usb/nodes/scan.py +++ b/plugwise_usb/nodes/scan.py @@ -1,137 +1,569 @@ """Plugwise Scan node object.""" + +from __future__ import annotations + +from asyncio import Task, gather +from collections.abc import Awaitable, Callable, Coroutine +from dataclasses import replace +from datetime import UTC, datetime import logging +from typing import Any, Final -from ..constants import ( - FEATURE_MOTION, - FEATURE_PING, - FEATURE_RSSI_IN, - FEATURE_RSSI_OUT, - SCAN_CONFIGURE_ACCEPTED, - SCAN_DAYLIGHT_MODE, - SCAN_MOTION_RESET_TIMER, - SCAN_SENSITIVITY_HIGH, - SCAN_SENSITIVITY_MEDIUM, - SCAN_SENSITIVITY_OFF, -) +from ..api import MotionConfig, MotionSensitivity, MotionState, NodeEvent, NodeFeature +from ..connection import StickController +from ..exceptions import MessageError, NodeError, NodeTimeout from ..messages.requests import ScanConfigureRequest, ScanLightCalibrateRequest -from ..messages.responses import NodeAckResponse, NodeSwitchGroupResponse +from ..messages.responses import ( + NODE_SWITCH_GROUP_ID, + NodeAckResponseType, + NodeSwitchGroupResponse, + PlugwiseResponse, +) from ..nodes.sed import NodeSED +from .helpers import raise_not_loaded +from .helpers.firmware import SCAN_FIRMWARE_SUPPORT _LOGGER = logging.getLogger(__name__) +CACHE_MOTION_STATE = "motion_state" +CACHE_MOTION_TIMESTAMP = 
"motion_timestamp" +CACHE_MOTION_RESET_TIMER = "motion_reset_timer" + +CACHE_SCAN_SENSITIVITY = "scan_sensitivity_level" +CACHE_SCAN_DAYLIGHT_MODE = "scan_daylight_mode" + + +# region Defaults for Scan Devices + +SCAN_DEFAULT_MOTION_STATE: Final = False + +# Time in minutes the motion sensor should not sense motion to +# report "no motion" state [Source: 1min - 4uur] +SCAN_DEFAULT_MOTION_RESET_TIMER: Final = 10 + +# Default sensitivity of the motion sensors +SCAN_DEFAULT_SENSITIVITY: Final = MotionSensitivity.MEDIUM + +# Light override +SCAN_DEFAULT_DAYLIGHT_MODE: Final = False + +# endregion + class PlugwiseScan(NodeSED): - """provides interface to the Plugwise Scan nodes""" - - def __init__(self, mac, address, message_sender): - super().__init__(mac, address, message_sender) - self._features = ( - FEATURE_MOTION["id"], - FEATURE_PING["id"], - FEATURE_RSSI_IN["id"], - FEATURE_RSSI_OUT["id"], - ) - self._motion_state = False - self._motion_reset_timer = None - self._daylight_mode = None - self._sensitivity = None - self._new_motion_reset_timer = None - self._new_daylight_mode = None - self._new_sensitivity = None + """Plugwise Scan node.""" + + def __init__( + self, + mac: str, + address: int, + controller: StickController, + loaded_callback: Callable[[NodeEvent, str], Awaitable[None]], + ): + """Initialize Scan Device.""" + super().__init__(mac, address, controller, loaded_callback) + self._unsubscribe_switch_group: Callable[[], None] | None = None + self._reset_timer_motion_on: datetime | None = None + self._scan_subscription: Callable[[], None] | None = None + + self._motion_state = MotionState() + self._motion_config = MotionConfig() + self._new_daylight_mode: bool | None = None + self._new_reset_timer: int | None = None + self._new_sensitivity_level: MotionSensitivity | None = None + + self._scan_config_task_scheduled = False + self._configure_daylight_mode_task: Task[Coroutine[Any, Any, None]] | None = ( + None + ) + + # region Load & Initialize + + async def load(self) -> bool: + """Load and activate Scan node features.""" + if self._loaded: + return True + if self._cache_enabled: + _LOGGER.debug("Load Scan node %s from cache", self._node_info.mac) + await self._load_from_cache() + else: + self._load_defaults() + self._loaded = True + self._setup_protocol( + SCAN_FIRMWARE_SUPPORT, + ( + NodeFeature.BATTERY, + NodeFeature.INFO, + NodeFeature.PING, + NodeFeature.MOTION, + NodeFeature.MOTION_CONFIG, + ), + ) + if await self.initialize(): + await self._loaded_callback(NodeEvent.LOADED, self.mac) + return True + _LOGGER.debug("Load of Scan node %s failed", self._node_info.mac) + return False + + @raise_not_loaded + async def initialize(self) -> bool: + """Initialize Scan node.""" + if self._initialized: + return True + self._unsubscribe_switch_group = await self._message_subscribe( + self._switch_group, + self._mac_in_bytes, + (NODE_SWITCH_GROUP_ID,), + ) + return await super().initialize() + + async def unload(self) -> None: + """Unload node.""" + if self._unsubscribe_switch_group is not None: + self._unsubscribe_switch_group() + await super().unload() + + # region Caching + def _load_defaults(self) -> None: + """Load default configuration settings.""" + super()._load_defaults() + self._motion_state = MotionState( + state=SCAN_DEFAULT_MOTION_STATE, + timestamp=None, + ) + self._motion_config = MotionConfig( + reset_timer=SCAN_DEFAULT_MOTION_RESET_TIMER, + daylight_mode=SCAN_DEFAULT_DAYLIGHT_MODE, + sensitivity_level=SCAN_DEFAULT_SENSITIVITY, + ) + + async def _load_from_cache(self) -> 
bool: + """Load states from previous cached information. Returns True if successful.""" + if not await super()._load_from_cache(): + self._load_defaults() + return False + self._motion_state = MotionState( + state=self._motion_from_cache(), + timestamp=self._motion_timestamp_from_cache(), + ) + self._motion_config = MotionConfig( + daylight_mode=self._daylight_mode_from_cache(), + reset_timer=self._reset_timer_from_cache(), + sensitivity_level=self._sensitivity_level_from_cache(), + ) + return True + + def _daylight_mode_from_cache(self) -> bool: + """Load awake duration from cache.""" + if (daylight_mode := self._get_cache(CACHE_SCAN_DAYLIGHT_MODE)) is not None: + if daylight_mode == "True": + return True + return False + return SCAN_DEFAULT_DAYLIGHT_MODE + + def _motion_from_cache(self) -> bool: + """Load motion state from cache.""" + if (cached_motion_state := self._get_cache(CACHE_MOTION_STATE)) is not None: + if cached_motion_state == "True": + if ( + motion_timestamp := self._motion_timestamp_from_cache() + ) is not None: + if ( + datetime.now(tz=UTC) - motion_timestamp + ).seconds < self._reset_timer_from_cache() * 60: + return True + return False + return SCAN_DEFAULT_MOTION_STATE + + def _reset_timer_from_cache(self) -> int: + """Load reset timer from cache.""" + if (reset_timer := self._get_cache(CACHE_MOTION_RESET_TIMER)) is not None: + return int(reset_timer) + return SCAN_DEFAULT_MOTION_RESET_TIMER + + def _sensitivity_level_from_cache(self) -> MotionSensitivity: + """Load sensitivity level from cache.""" + if (sensitivity_level := self._get_cache(CACHE_SCAN_SENSITIVITY)) is not None: + return MotionSensitivity[sensitivity_level] + return SCAN_DEFAULT_SENSITIVITY + + def _motion_timestamp_from_cache(self) -> datetime | None: + """Load motion timestamp from cache.""" + if ( + motion_timestamp := self._get_cache_as_datetime(CACHE_MOTION_TIMESTAMP) + ) is not None: + return motion_timestamp + return None + + # endregion + + # region Properties + + @property + @raise_not_loaded + def daylight_mode(self) -> bool: + """Daylight mode of motion sensor.""" + if self._new_daylight_mode is not None: + return self._new_daylight_mode + if self._motion_config.daylight_mode is not None: + return self._motion_config.daylight_mode + return SCAN_DEFAULT_DAYLIGHT_MODE @property + @raise_not_loaded def motion(self) -> bool: - """Return the last known motion state""" + """Motion detection value.""" + if self._motion_state.state is not None: + return self._motion_state.state + raise NodeError(f"Motion state is not available for {self.name}") + + @property + @raise_not_loaded + def motion_state(self) -> MotionState: + """Motion detection state.""" return self._motion_state - def message_for_scan(self, message): - """Process received message""" - if isinstance(message, NodeSwitchGroupResponse): + @property + @raise_not_loaded + def motion_timestamp(self) -> datetime: + """Timestamp of last motion state change.""" + if self._motion_state.timestamp is not None: + return self._motion_state.timestamp + raise NodeError(f"Motion timestamp is currently not available for {self.name}") + + @property + @raise_not_loaded + def motion_config(self) -> MotionConfig: + """Motion configuration.""" + return MotionConfig( + reset_timer=self.reset_timer, + daylight_mode=self.daylight_mode, + sensitivity_level=self.sensitivity_level, + ) + + @property + @raise_not_loaded + def reset_timer(self) -> int: + """Total minutes without motion before no motion is reported.""" + if self._new_reset_timer is not None: + return 
self._new_reset_timer + if self._motion_config.reset_timer is not None: + return self._motion_config.reset_timer + return SCAN_DEFAULT_MOTION_RESET_TIMER + + @property + def scan_config_task_scheduled(self) -> bool: + """Check if a configuration task is scheduled.""" + return self._scan_config_task_scheduled + + @property + def sensitivity_level(self) -> MotionSensitivity: + """Sensitivity level of motion sensor.""" + if self._new_sensitivity_level is not None: + return self._new_sensitivity_level + if self._motion_config.sensitivity_level is not None: + return self._motion_config.sensitivity_level + return SCAN_DEFAULT_SENSITIVITY + + # endregion + # region Configuration actions + + @raise_not_loaded + async def set_motion_daylight_mode(self, state: bool) -> bool: + """Configure if motion must be detected when light level is below threshold. + + Configuration request will be queued and will be applied the next time when node is awake for maintenance. + """ + _LOGGER.debug( + "set_motion_daylight_mode | Device %s | %s -> %s", + self.name, + self._motion_config.daylight_mode, + state, + ) + self._new_daylight_mode = state + if self._motion_config.daylight_mode == state: + return False + if not self._scan_config_task_scheduled: + await self.schedule_task_when_awake(self._configure_scan_task()) + self._scan_config_task_scheduled = True _LOGGER.debug( - "Switch group %s to state %s received from %s", - str(message.group.value), - str(message.power_state.value), - self.mac, + "set_motion_daylight_mode | Device %s | config scheduled", + self.name, ) - self._process_switch_group(message) - elif isinstance(message, NodeAckResponse): - self._process_ack_message(message) - else: - _LOGGER.info( - "Unsupported message %s received from %s", - message.__class__.__name__, - self.mac, + return True + + @raise_not_loaded + async def set_motion_reset_timer(self, minutes: int) -> bool: + """Configure the motion reset timer in minutes.""" + _LOGGER.debug( + "set_motion_reset_timer | Device %s | %s -> %s", + self.name, + self._motion_config.reset_timer, + minutes, + ) + if minutes < 1 or minutes > 255: + raise ValueError( + f"Invalid motion reset timer ({minutes}). It must be between 1 and 255 minutes." 
) + self._new_reset_timer = minutes + if self._motion_config.reset_timer == minutes: + return False + if not self._scan_config_task_scheduled: + await self.schedule_task_when_awake(self._configure_scan_task()) + self._scan_config_task_scheduled = True + _LOGGER.debug( + "set_motion_reset_timer | Device %s | config scheduled", + self.name, + ) + return True - def _process_ack_message(self, message): - """Process acknowledge message""" - if message.ack_id == SCAN_CONFIGURE_ACCEPTED: - self._motion_reset_timer = self._new_motion_reset_timer - self._daylight_mode = self._new_daylight_mode - self._sensitivity = self._new_sensitivity - else: - _LOGGER.info( - "Unsupported ack message %s received for %s", - str(message.ack_id), - self.mac, + @raise_not_loaded + async def set_motion_sensitivity_level(self, level: MotionSensitivity) -> bool: + """Configure the motion sensitivity level.""" + _LOGGER.debug( + "set_motion_sensitivity_level | Device %s | %s -> %s", + self.name, + self._motion_config.sensitivity_level, + level, + ) + self._new_sensitivity_level = level + if self._motion_config.sensitivity_level == level: + return False + if not self._scan_config_task_scheduled: + await self.schedule_task_when_awake(self._configure_scan_task()) + self._scan_config_task_scheduled = True + _LOGGER.debug( + "set_motion_sensitivity_level | Device %s | config scheduled", + self.name, ) + return True + + # endregion + + # endregion + + async def _switch_group(self, response: PlugwiseResponse) -> bool: + """Switch group request from Scan. - def _process_switch_group(self, message): - """Switch group request from Scan""" - if message.power_state.value == 0: - # turn off => clear motion - if self._motion_state: - self._motion_state = False - self.do_callback(FEATURE_MOTION["id"]) - elif message.power_state.value == 1: - # turn on => motion - if not self._motion_state: - self._motion_state = True - self.do_callback(FEATURE_MOTION["id"]) + turn on => motion, turn off => clear motion + """ + if not isinstance(response, NodeSwitchGroupResponse): + raise MessageError( + f"Invalid response message type ({response.__class__.__name__}) received, expected NodeSwitchGroupResponse" + ) + _LOGGER.warning("%s received %s", self.name, response) + await gather( + self._available_update_state(True, response.timestamp), + self._motion_state_update(response.switch_state, response.timestamp) + ) + return True + + async def _motion_state_update( + self, motion_state: bool, timestamp: datetime + ) -> None: + """Process motion state update.""" + _LOGGER.debug( + "motion_state_update for %s: %s -> %s", + self.name, + self._motion_state.state, + motion_state, + ) + state_update = False + if motion_state: + self._set_cache(CACHE_MOTION_STATE, "True") + if self._motion_state.state is None or not self._motion_state.state: + self._reset_timer_motion_on = timestamp + state_update = True else: - _LOGGER.warning( - "Unknown power_state (%s) received from %s", - str(message.power_state.value), - self.mac, + self._set_cache(CACHE_MOTION_STATE, "False") + if self._motion_state.state is None or self._motion_state.state: + if self._reset_timer_motion_on is not None: + reset_timer = (timestamp - self._reset_timer_motion_on).seconds + if self._motion_config.reset_timer is None: + self._motion_config = replace( + self._motion_config, + reset_timer=reset_timer, + ) + elif reset_timer < self._motion_config.reset_timer: + _LOGGER.warning( + "Adjust reset timer for %s from %s -> %s", + self.name, + self._motion_config.reset_timer, + reset_timer, + ) + 
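# The observed on->off gap is shorter than the configured reset timer,
# so adopt the observed gap as the new reset_timer value.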
self._motion_config = replace( + self._motion_config, + reset_timer=reset_timer, + ) + state_update = True + self._set_cache(CACHE_MOTION_TIMESTAMP, timestamp) + if state_update: + self._motion_state = replace( + self._motion_state, + state=motion_state, + timestamp=timestamp, + ) + await gather( + *[ + self.publish_feature_update_to_subscribers( + NodeFeature.MOTION, + self._motion_state, + ), + self.save_cache(), + ] ) - # TODO: 20220125 snakestyle name - # pylint: disable=invalid-name - def CalibrateLight(self, callback=None): - """Queue request to calibration light sensitivity""" - self._queue_request(ScanLightCalibrateRequest(self._mac), callback) + async def _configure_scan_task(self) -> bool: + """Configure Scan device settings. Returns True if successful.""" + self._scan_config_task_scheduled = False + change_required = False + if self._new_reset_timer is not None: + change_required = True + if self._new_sensitivity_level is not None: + change_required = True + if self._new_daylight_mode is not None: + change_required = True + if not change_required: + return True + if not await self.scan_configure( + motion_reset_timer=self.reset_timer, + sensitivity_level=self.sensitivity_level, + daylight_mode=self.daylight_mode, + ): + return False + if self._new_reset_timer is not None: + _LOGGER.info( + "Change of motion reset timer from %s to %s minutes has been accepted by %s", + self._motion_config.reset_timer, + self._new_reset_timer, + self.name, + ) + self._new_reset_timer = None + if self._new_sensitivity_level is not None: + _LOGGER.info( + "Change of sensitivity level from %s to %s has been accepted by %s", + self._motion_config.sensitivity_level, + self._new_sensitivity_level, + self.name, + ) + self._new_sensitivity_level = None + if self._new_daylight_mode is not None: + _LOGGER.info( + "Change of daylight mode from %s to %s has been accepted by %s", + "On" if self._motion_config.daylight_mode else "Off", + "On" if self._new_daylight_mode else "Off", + self.name, + ) + self._new_daylight_mode = None + return True - # TODO: 20220125 snakestyle name - # pylint: disable=invalid-name - def Configure_scan( + async def scan_configure( self, - motion_reset_timer=SCAN_MOTION_RESET_TIMER, - sensitivity_level=SCAN_SENSITIVITY_MEDIUM, - daylight_mode=SCAN_DAYLIGHT_MODE, - callback=None, - ): - """Queue request to set motion reporting settings""" - self._new_motion_reset_timer = motion_reset_timer - self._new_daylight_mode = daylight_mode - if sensitivity_level == SCAN_SENSITIVITY_HIGH: + motion_reset_timer: int, + sensitivity_level: MotionSensitivity, + daylight_mode: bool, + ) -> bool: + """Configure Scan device settings. 
Returns True if successful.""" + # Default to medium: + sensitivity_value = 30 # b'1E' + if sensitivity_level == MotionSensitivity.HIGH: sensitivity_value = 20 # b'14' - elif sensitivity_level == SCAN_SENSITIVITY_OFF: + if sensitivity_level == MotionSensitivity.OFF: sensitivity_value = 255 # b'FF' + request = ScanConfigureRequest( + self._send, + self._mac_in_bytes, + motion_reset_timer, + sensitivity_value, + daylight_mode, + ) + if (response := await request.send()) is not None: + if response.node_ack_type == NodeAckResponseType.SCAN_CONFIG_FAILED: + self._new_reset_timer = None + self._new_sensitivity_level = None + self._new_daylight_mode = None + _LOGGER.warning("Failed to configure scan settings for %s", self.name) + return False + if response.node_ack_type == NodeAckResponseType.SCAN_CONFIG_ACCEPTED: + await self._scan_configure_update( + motion_reset_timer, sensitivity_level, daylight_mode + ) + return True + _LOGGER.warning( + "Unexpected response ack type %s for %s", + response.node_ack_type, + self.name, + ) + return False + self._new_reset_timer = None + self._new_sensitivity_level = None + self._new_daylight_mode = None + return False + + async def _scan_configure_update( + self, + motion_reset_timer: int, + sensitivity_level: MotionSensitivity, + daylight_mode: bool, + ) -> None: + """Process result of scan configuration update.""" + self._motion_config = replace( + self._motion_config, + reset_timer=motion_reset_timer, + sensitivity_level=sensitivity_level, + daylight_mode=daylight_mode, + ) + self._set_cache(CACHE_MOTION_RESET_TIMER, str(motion_reset_timer)) + self._set_cache(CACHE_SCAN_SENSITIVITY, sensitivity_level.value) + if daylight_mode: + self._set_cache(CACHE_SCAN_DAYLIGHT_MODE, "True") else: - # Default to medium: - sensitivity_value = 30 # b'1E' - self._new_sensitivity = sensitivity_level - self._queue_request( - ScanConfigureRequest( - self._mac, motion_reset_timer, sensitivity_value, daylight_mode + self._set_cache(CACHE_SCAN_DAYLIGHT_MODE, "False") + await gather( + self.publish_feature_update_to_subscribers( + NodeFeature.MOTION_CONFIG, + self._motion_config, ), - callback, + self.save_cache(), ) - # TODO: 20220125 snakestyle name - # pylint: disable=invalid-name - def SetMotionAction(self, callback=None): - """Queue Configure Scan to signal motion""" - # TODO: + async def scan_calibrate_light(self) -> bool: + """Request to calibration light sensitivity of Scan device.""" + request = ScanLightCalibrateRequest(self._send, self._mac_in_bytes) + if (response := await request.send()) is not None: + if ( + response.node_ack_type + == NodeAckResponseType.SCAN_LIGHT_CALIBRATION_ACCEPTED + ): + return True + return False + raise NodeTimeout( + f"No response from Scan device {self.mac} " + + "to light calibration request." 
+ ) - # self._queue_request(NodeSwitchGroupRequest(self._mac), callback) + @raise_not_loaded + async def get_state(self, features: tuple[NodeFeature]) -> dict[NodeFeature, Any]: + """Update latest state for given feature.""" + states: dict[NodeFeature, Any] = {} + for feature in features: + _LOGGER.debug( + "Updating node %s - feature '%s'", + self._node_info.mac, + feature, + ) + if feature not in self._features: + raise NodeError( + f"Update of feature '{feature.name}' is " + + f"not supported for {self.mac}" + ) + if feature == NodeFeature.MOTION: + states[NodeFeature.MOTION] = self._motion_state + elif feature == NodeFeature.MOTION_CONFIG: + states[NodeFeature.MOTION_CONFIG] = self._motion_config + else: + state_result = await super().get_state((feature,)) + states[feature] = state_result[feature] + if NodeFeature.AVAILABLE not in states: + states[NodeFeature.AVAILABLE] = self.available_state + return states diff --git a/plugwise_usb/nodes/sed.py b/plugwise_usb/nodes/sed.py index 9bb6265ad..69197c19b 100644 --- a/plugwise_usb/nodes/sed.py +++ b/plugwise_usb/nodes/sed.py @@ -1,162 +1,767 @@ """Plugwise SED (Sleeping Endpoint Device) base object.""" -# TODO: -# - Expose awake state as sensor -# - Set available state after 2 missed awake messages +from __future__ import annotations +from asyncio import ( + CancelledError, + Future, + Lock, + Task, + gather, + get_running_loop, + wait_for, +) +from collections.abc import Awaitable, Callable, Coroutine +from dataclasses import replace +from datetime import datetime, timedelta import logging +from typing import Any, Final -from ..constants import ( - FEATURE_PING, - FEATURE_RSSI_IN, - FEATURE_RSSI_OUT, - PRIORITY_HIGH, - SED_AWAKE_BUTTON, - SED_AWAKE_FIRST, - SED_AWAKE_MAINTENANCE, - SED_AWAKE_STARTUP, - SED_AWAKE_STATE, - SED_CLOCK_INTERVAL, - SED_CLOCK_SYNC, - SED_MAINTENANCE_INTERVAL, - SED_SLEEP_FOR, - SED_STAY_ACTIVE, - SLEEP_SET, +from ..api import BatteryConfig, NodeEvent, NodeFeature, NodeInfo +from ..connection import StickController +from ..exceptions import MessageError, NodeError +from ..messages.requests import NodeSleepConfigRequest +from ..messages.responses import ( + NODE_AWAKE_RESPONSE_ID, + NodeAwakeResponse, + NodeAwakeResponseType, + NodeInfoResponse, + NodeResponseType, + PlugwiseResponse, ) -from ..messages.requests import NodeInfoRequest, NodePingRequest, NodeSleepConfigRequest -from ..messages.responses import NodeAckLargeResponse, NodeAwakeResponse -from ..nodes import PlugwiseNode +from .helpers import raise_not_loaded +from .node import PlugwiseBaseNode + +CACHE_AWAKE_DURATION = "awake_duration" +CACHE_CLOCK_INTERVAL = "clock_interval" +CACHE_SLEEP_DURATION = "sleep_duration" +CACHE_CLOCK_SYNC = "clock_sync" +CACHE_MAINTENANCE_INTERVAL = "maintenance_interval" +CACHE_AWAKE_TIMESTAMP = "awake_timestamp" +CACHE_AWAKE_REASON = "awake_reason" + +# Number of seconds to ignore duplicate awake messages +AWAKE_RETRY: Final = 5 + +# Defaults for 'Sleeping End Devices' + +# Time in seconds the SED keep itself awake to receive +# and respond to other messages +SED_DEFAULT_AWAKE_DURATION: Final = 10 + +# 7 days, duration in minutes the node synchronize its clock +SED_DEFAULT_CLOCK_INTERVAL: Final = 25200 + +# Enable or disable synchronizing clock +SED_DEFAULT_CLOCK_SYNC: Final = False + +# Interval in minutes the SED will awake for maintenance purposes +# Source [5min - 24h] +SED_DEFAULT_MAINTENANCE_INTERVAL: Final = 60 # Assume standard interval of 1 hour +SED_MAX_MAINTENANCE_INTERVAL_OFFSET: Final = 30 # seconds + +# Time 
in minutes the SED will sleep +SED_DEFAULT_SLEEP_DURATION: Final = 60 + _LOGGER = logging.getLogger(__name__) -class NodeSED(PlugwiseNode): - """provides base class for SED based nodes like Scan, Sense & Switch""" - - def __init__(self, mac, address, message_sender): - super().__init__(mac, address, message_sender) - self._sed_requests = {} - self.maintenance_interval = SED_MAINTENANCE_INTERVAL - self._new_maintenance_interval = None - self._wake_up_interval = None - self._battery_powered = True - - def message_for_sed(self, message): - """Process received message""" - if isinstance(message, NodeAwakeResponse): - self._process_awake_response(message) - elif isinstance(message, NodeAckLargeResponse): - if message.ack_id == SLEEP_SET: - self.maintenance_interval = self._new_maintenance_interval - else: - self.message_for_scan(message) - self.message_for_switch(message) - self.message_for_sense(message) - else: - self.message_for_scan(message) - self.message_for_switch(message) - self.message_for_sense(message) +class NodeSED(PlugwiseBaseNode): + """provides base class for SED based nodes like Scan, Sense & Switch.""" + + # Maintenance + _awake_timer_task: Task[None] | None = None + _ping_at_awake: bool = False - def message_for_scan(self, message): - """Pass messages to PlugwiseScan class""" + _awake_subscription: Callable[[], None] | None = None - def message_for_switch(self, message): - """Pass messages to PlugwiseSwitch class""" + def __init__( + self, + mac: str, + address: int, + controller: StickController, + loaded_callback: Callable[[NodeEvent, str], Awaitable[None]], + ): + """Initialize base class for Sleeping End Device.""" + super().__init__(mac, address, controller, loaded_callback) + self._loop = get_running_loop() + self._node_info.is_battery_powered = True - def message_for_sense(self, message): - """Pass messages to PlugwiseSense class""" + # Configure SED + self._battery_config = BatteryConfig() + self._new_battery_config = BatteryConfig() + self._sed_config_task_scheduled = False + self._send_task_queue: list[Coroutine[Any, Any, bool]] = [] + self._send_task_lock = Lock() + self._delayed_task: Task[None] | None = None - def _process_awake_response(self, message): - """ "Process awake message""" - _LOGGER.debug( - "Awake message type '%s' received from %s", - str(message.awake_type.value), - self.mac, - ) - if message.awake_type.value in [ - SED_AWAKE_MAINTENANCE, - SED_AWAKE_FIRST, - SED_AWAKE_STARTUP, - SED_AWAKE_BUTTON, - ]: - for pending_request in self._sed_requests.values(): - request_message, callback = pending_request - _LOGGER.info( - "Send queued %s message to SED node %s", - request_message.__class__.__name__, - self.mac, - ) - self.message_sender(request_message, callback, -1, PRIORITY_HIGH) - self._sed_requests = {} + self._last_awake: dict[NodeAwakeResponseType, datetime] = {} + self._last_awake_reason: str = "Unknown" + self._awake_future: Future[bool] | None = None + + # Maintenance + self._maintenance_last_awake: datetime | None = None + self._maintenance_interval_restored_from_cache = False + + async def load(self) -> bool: + """Load and activate SED node features.""" + if self._loaded: + return True + if self._cache_enabled: + _LOGGER.debug("Load SED node %s from cache", self._node_info.mac) + await self._load_from_cache() else: - if message.awake_type.value == SED_AWAKE_STATE: - _LOGGER.debug("Node %s awake for state change", self.mac) - else: - _LOGGER.info( - "Unknown awake message type (%s) received for node %s", - str(message.awake_type.value), - 
self.mac, - ) + self._load_defaults() + self._loaded = True + self._features += (NodeFeature.BATTERY,) + if await self.initialize(): + await self._loaded_callback(NodeEvent.LOADED, self.mac) + return True + _LOGGER.debug("Load of SED node %s failed", self._node_info.mac) + return False + + async def unload(self) -> None: + """Deactivate and unload node features.""" + if self._awake_future is not None and not self._awake_future.done(): + self._awake_future.set_result(True) + if self._awake_timer_task is not None and not self._awake_timer_task.done(): + await self._awake_timer_task + if self._awake_subscription is not None: + self._awake_subscription() + if self._delayed_task is not None and not self._delayed_task.done(): + await self._delayed_task + if len(self._send_task_queue) > 0: + _LOGGER.warning( + "Unable to execute %s open tasks for %s", + len(self._send_task_queue), + self.name, + ) + await super().unload() + + @raise_not_loaded + async def initialize(self) -> bool: + """Initialize SED node.""" + if self._initialized: + return True + self._awake_subscription = await self._message_subscribe( + self._awake_response, + self._mac_in_bytes, + (NODE_AWAKE_RESPONSE_ID,), + ) + return await super().initialize() - def _queue_request(self, request_message, callback=None): - """Queue request to be sent when SED is awake. Last message wins.""" - self._sed_requests[request_message.ID] = ( - request_message, - callback, + def _load_defaults(self) -> None: + """Load default configuration settings.""" + self._battery_config = BatteryConfig( + awake_duration=SED_DEFAULT_AWAKE_DURATION, + clock_interval=SED_DEFAULT_CLOCK_INTERVAL, + clock_sync=SED_DEFAULT_CLOCK_SYNC, + maintenance_interval=SED_DEFAULT_MAINTENANCE_INTERVAL, + sleep_duration=SED_DEFAULT_SLEEP_DURATION, ) - def _request_info(self, callback=None): - """Request info from node""" - self._queue_request( - NodeInfoRequest(self._mac), - callback, + async def _load_from_cache(self) -> bool: + """Load states from previous cached information. 
Returns True if successful.""" + if not await super()._load_from_cache(): + self._load_defaults() + return False + self._battery_config = BatteryConfig( + awake_duration=self._awake_duration_from_cache(), + clock_interval=self._clock_interval_from_cache(), + clock_sync=self._clock_sync_from_cache(), + maintenance_interval=self._maintenance_interval_from_cache(), + sleep_duration=self._sleep_duration_from_cache(), ) + self._awake_timestamp_from_cache() + self._awake_reason_from_cache() + return True - def _request_ping(self, callback=None, ignore_sensor=True): - """Ping node""" + def _awake_duration_from_cache(self) -> int: + """Load awake duration from cache.""" + if (awake_duration := self._get_cache(CACHE_AWAKE_DURATION)) is not None: + return int(awake_duration) + return SED_DEFAULT_AWAKE_DURATION + + def _clock_interval_from_cache(self) -> int: + """Load clock interval from cache.""" + if (clock_interval := self._get_cache(CACHE_CLOCK_INTERVAL)) is not None: + return int(clock_interval) + return SED_DEFAULT_CLOCK_INTERVAL + + def _clock_sync_from_cache(self) -> bool: + """Load clock sync state from cache.""" + if (clock_sync := self._get_cache(CACHE_CLOCK_SYNC)) is not None: + if clock_sync == "True": + return True + return False + return SED_DEFAULT_CLOCK_SYNC + + def _maintenance_interval_from_cache(self) -> int: + """Load maintenance interval from cache.""" if ( - ignore_sensor - or self._callbacks.get(FEATURE_PING["id"]) - or self._callbacks.get(FEATURE_RSSI_IN["id"]) - or self._callbacks.get(FEATURE_RSSI_OUT["id"]) + maintenance_interval := self._get_cache(CACHE_MAINTENANCE_INTERVAL) + ) is not None: + self._maintenance_interval_restored_from_cache = True + return int(maintenance_interval) + return SED_DEFAULT_MAINTENANCE_INTERVAL + + def _sleep_duration_from_cache(self) -> int: + """Load sleep duration from cache.""" + if (sleep_duration := self._get_cache(CACHE_SLEEP_DURATION)) is not None: + return int(sleep_duration) + return SED_DEFAULT_SLEEP_DURATION + + def _awake_timestamp_from_cache(self) -> datetime | None: + """Load last awake timestamp from cache.""" + return self._get_cache_as_datetime(CACHE_AWAKE_TIMESTAMP) + + def _awake_reason_from_cache(self) -> str | None: + """Load last awake state from cache.""" + return self._get_cache(CACHE_AWAKE_REASON) + + # region Configuration actions + @raise_not_loaded + async def set_awake_duration(self, seconds: int) -> bool: + """Change the awake duration.""" + _LOGGER.debug( + "set_awake_duration | Device %s | %s -> %s", + self.name, + self._battery_config.awake_duration, + seconds, + ) + if seconds < 1 or seconds > 255: + raise ValueError( + f"Invalid awake duration ({seconds}). It must be between 1 and 255 seconds." 
+ ) + if self._battery_config.awake_duration == seconds: + self._new_battery_config = replace( + self._new_battery_config, awake_duration=seconds + ) + return False + self._new_battery_config = replace( + self._new_battery_config, awake_duration=seconds + ) + if not self._sed_config_task_scheduled: + await self.schedule_task_when_awake(self._configure_sed_task()) + self._sed_config_task_scheduled = True + _LOGGER.debug( + "set_awake_duration | Device %s | config scheduled", + self.name, + ) + return True + + @raise_not_loaded + async def set_clock_interval(self, minutes: int) -> bool: + """Change the clock interval.""" + _LOGGER.debug( + "set_clock_interval | Device %s | %s -> %s", + self.name, + self._battery_config.clock_interval, + minutes, + ) + if minutes < 1 or minutes > 65535: + raise ValueError( + f"Invalid clock interval ({minutes}). It must be between 1 and 65535 minutes." + ) + if self.battery_config.clock_interval == minutes: + self._new_battery_config = replace( + self._new_battery_config, clock_interval=minutes + ) + return False + self._new_battery_config = replace( + self._new_battery_config, clock_interval=minutes + ) + if not self._sed_config_task_scheduled: + await self.schedule_task_when_awake(self._configure_sed_task()) + self._sed_config_task_scheduled = True + _LOGGER.debug( + "set_clock_interval | Device %s | config scheduled", + self.name, + ) + return True + + @raise_not_loaded + async def set_clock_sync(self, sync: bool) -> bool: + """Change the clock synchronization setting.""" + _LOGGER.debug( + "set_clock_sync | Device %s | %s -> %s", + self.name, + self._battery_config.clock_sync, + sync, + ) + if self._battery_config.clock_sync == sync: + self._new_battery_config = replace( + self._new_battery_config, clock_sync=sync + ) + return False + self._new_battery_config = replace(self._new_battery_config, clock_sync=sync) + if not self._sed_config_task_scheduled: + await self.schedule_task_when_awake(self._configure_sed_task()) + self._sed_config_task_scheduled = True + _LOGGER.debug( + "set_clock_sync | Device %s | config scheduled", + self.name, + ) + return True + + @raise_not_loaded + async def set_maintenance_interval(self, minutes: int) -> bool: + """Change the maintenance interval.""" + _LOGGER.debug( + "set_maintenance_interval | Device %s | %s -> %s", + self.name, + self._battery_config.maintenance_interval, + minutes, + ) + if minutes < 1 or minutes > 1440: + raise ValueError( + f"Invalid maintenance interval ({minutes}). It must be between 1 and 1440 minutes." + ) + if self.battery_config.maintenance_interval == minutes: + self._new_battery_config = replace( + self._new_battery_config, maintenance_interval=minutes + ) + return False + self._new_battery_config = replace( + self._new_battery_config, maintenance_interval=minutes + ) + if not self._sed_config_task_scheduled: + await self.schedule_task_when_awake(self._configure_sed_task()) + self._sed_config_task_scheduled = True + _LOGGER.debug( + "set_maintenance_interval | Device %s | config scheduled", + self.name, + ) + return True + + @raise_not_loaded + async def set_sleep_duration(self, minutes: int) -> bool: + """Reconfigure the sleep duration in minutes for a Sleeping Endpoint Device. + + Configuration will be applied next time when node is awake for maintenance. + """ + _LOGGER.debug( + "set_sleep_duration | Device %s | %s -> %s", + self.name, + self._battery_config.sleep_duration, + minutes, + ) + if minutes < 1 or minutes > 65535: + raise ValueError( + f"Invalid sleep duration ({minutes}). 
It must be between 1 and 65535 minutes." + ) + if self._battery_config.sleep_duration == minutes: + self._new_battery_config = replace( + self._new_battery_config, sleep_duration=minutes + ) + return False + self._new_battery_config = replace( + self._new_battery_config, sleep_duration=minutes + ) + if not self._sed_config_task_scheduled: + await self.schedule_task_when_awake(self._configure_sed_task()) + self._sed_config_task_scheduled = True + _LOGGER.debug( + "set_sleep_duration | Device %s | config scheduled", + self.name, + ) + return True + + # endregion + # region Properties + @property + @raise_not_loaded + def awake_duration(self) -> int: + """Duration in seconds a battery powered devices is awake.""" + if self._new_battery_config.awake_duration is not None: + return self._new_battery_config.awake_duration + if self._battery_config.awake_duration is not None: + return self._battery_config.awake_duration + return SED_DEFAULT_AWAKE_DURATION + + @property + @raise_not_loaded + def battery_config(self) -> BatteryConfig: + """Battery related configuration settings.""" + return BatteryConfig( + awake_duration=self.awake_duration, + clock_interval=self.clock_interval, + clock_sync=self.clock_sync, + maintenance_interval=self.maintenance_interval, + sleep_duration=self.sleep_duration, + ) + + @property + @raise_not_loaded + def clock_interval(self) -> int: + """Return the clock interval value.""" + if self._new_battery_config.clock_interval is not None: + return self._new_battery_config.clock_interval + if self._battery_config.clock_interval is not None: + return self._battery_config.clock_interval + return SED_DEFAULT_CLOCK_INTERVAL + + @property + @raise_not_loaded + def clock_sync(self) -> bool: + """Indicate if the internal clock must be synced.""" + if self._new_battery_config.clock_sync is not None: + return self._new_battery_config.clock_sync + if self._battery_config.clock_sync is not None: + return self._battery_config.clock_sync + return SED_DEFAULT_CLOCK_SYNC + + @property + @raise_not_loaded + def maintenance_interval(self) -> int: + """Return the maintenance interval value. + + When value is scheduled to be changed the return value is the optimistic value. + """ + if self._new_battery_config.maintenance_interval is not None: + return self._new_battery_config.maintenance_interval + if self._battery_config.maintenance_interval is not None: + return self._battery_config.maintenance_interval + return SED_DEFAULT_MAINTENANCE_INTERVAL + + @property + def sed_config_task_scheduled(self) -> bool: + """Check if a configuration task is scheduled.""" + return self._sed_config_task_scheduled + + @property + @raise_not_loaded + def sleep_duration(self) -> int: + """Return the sleep duration value in minutes. + + When value is scheduled to be changed the return value is the optimistic value. + """ + if self._new_battery_config.sleep_duration is not None: + return self._new_battery_config.sleep_duration + if self._battery_config.sleep_duration is not None: + return self._battery_config.sleep_duration + return SED_DEFAULT_SLEEP_DURATION + + # endregion + async def _configure_sed_task(self) -> bool: + """Configure SED settings. 
Returns True if successful.""" + _LOGGER.debug( + "_configure_sed_task | Device %s | start", + self.name, + ) + self._sed_config_task_scheduled = False + change_required = False + if self._new_battery_config.awake_duration is not None: + change_required = True + if self._new_battery_config.clock_interval is not None: + change_required = True + if self._new_battery_config.clock_sync is not None: + change_required = True + if self._new_battery_config.maintenance_interval is not None: + change_required = True + if self._new_battery_config.sleep_duration is not None: + change_required = True + if not change_required: + _LOGGER.debug( + "_configure_sed_task | Device %s | no change", + self.name, + ) + return True + _LOGGER.debug( + "_configure_sed_task | Device %s | request change", + self.name, + ) + if not await self.sed_configure( + awake_duration=self.awake_duration, + clock_interval=self.clock_interval, + clock_sync=self.clock_sync, + maintenance_interval=self.maintenance_interval, + sleep_duration=self.sleep_duration, ): - self._queue_request( - NodePingRequest(self._mac), - callback, + return False + + return True + + async def node_info_update( + self, node_info: NodeInfoResponse | None = None + ) -> NodeInfo | None: + """Update Node (hardware) information.""" + + if node_info is None and self.skip_update(self._node_info, 86400): + return self._node_info + return await super().node_info_update(node_info) + + async def _awake_response(self, response: PlugwiseResponse) -> bool: + """Process awake message.""" + if not isinstance(response, NodeAwakeResponse): + raise MessageError( + f"Invalid response message type ({response.__class__.__name__}) received, expected NodeAwakeResponse" + ) + _LOGGER.debug("Device %s is awake for %s", self.name, response.awake_type) + self._set_cache(CACHE_AWAKE_TIMESTAMP, response.timestamp) + await self._available_update_state(True, response.timestamp) + + # Pre-populate the last awake timestamp + if self._last_awake.get(response.awake_type) is None: + self._last_awake[response.awake_type] = response.timestamp + + # Skip awake messages that arrive shortly after each other + elif ( + self._last_awake[response.awake_type] + timedelta(seconds=AWAKE_RETRY) + > response.timestamp + ): + return True + + self._last_awake[response.awake_type] = response.timestamp + + tasks: list[Coroutine[Any, Any, None]] = [ + self._reset_awake(response.timestamp), + self.publish_feature_update_to_subscribers( + NodeFeature.BATTERY, + self._battery_config, + ), + ] + self._delayed_task = self._loop.create_task( + self._send_tasks(), name=f"Delayed update for {self._mac_in_str}" + ) + if response.awake_type == NodeAwakeResponseType.MAINTENANCE: + self._last_awake_reason = "Maintenance" + self._set_cache(CACHE_AWAKE_REASON, "Maintenance") + + if not self._maintenance_interval_restored_from_cache: + self._detect_maintenance_interval(response.timestamp) + if self._ping_at_awake: + tasks.append(self.update_ping_at_awake()) + elif response.awake_type == NodeAwakeResponseType.FIRST: + self._last_awake_reason = "First" + self._set_cache(CACHE_AWAKE_REASON, "First") + elif response.awake_type == NodeAwakeResponseType.STARTUP: + self._last_awake_reason = "Startup" + self._set_cache(CACHE_AWAKE_REASON, "Startup") + elif response.awake_type == NodeAwakeResponseType.STATE: + self._last_awake_reason = "State update" + self._set_cache(CACHE_AWAKE_REASON, "State update") + elif response.awake_type == NodeAwakeResponseType.BUTTON: + self._last_awake_reason = "Button press" +
self._set_cache(CACHE_AWAKE_REASON, "Button press") + if self._ping_at_awake: + tasks.append(self.update_ping_at_awake()) + + await gather(*tasks) + return True + + async def update_ping_at_awake(self) -> None: + """Get ping statistics.""" + await self.ping_update() + + def _detect_maintenance_interval(self, timestamp: datetime) -> None: + """Detect current maintenance interval.""" + if self._last_awake[NodeAwakeResponseType.MAINTENANCE] == timestamp: + return + new_interval_in_sec = ( + timestamp - self._last_awake[NodeAwakeResponseType.MAINTENANCE] + ).seconds + new_interval_in_min = round(new_interval_in_sec // 60) + _LOGGER.warning( + "Detect current maintenance interval for %s: %s (seconds), current %s (min)", + self.name, + new_interval_in_sec, + self._battery_config.maintenance_interval, + ) + # Validate new maintenance interval in seconds but store it in minutes + if (new_interval_in_sec + SED_MAX_MAINTENANCE_INTERVAL_OFFSET) < ( + SED_DEFAULT_MAINTENANCE_INTERVAL * 60 + ): + self._battery_config = replace( + self._battery_config, maintenance_interval=new_interval_in_min + ) + self._set_cache(CACHE_MAINTENANCE_INTERVAL, new_interval_in_min) + elif (new_interval_in_sec - SED_MAX_MAINTENANCE_INTERVAL_OFFSET) > ( + SED_DEFAULT_MAINTENANCE_INTERVAL * 60 + ): + self._battery_config = replace( + self._battery_config, maintenance_interval=new_interval_in_min + ) + self._set_cache(CACHE_MAINTENANCE_INTERVAL, new_interval_in_min) else: - _LOGGER.debug( - "Drop ping request for SED %s because no callback is registered", - self.mac, + # Within off-set margin of default, so use the default + self._battery_config = replace( + self._battery_config, + maintenance_interval=SED_DEFAULT_MAINTENANCE_INTERVAL, + ) + self._set_cache( + CACHE_MAINTENANCE_INTERVAL, SED_DEFAULT_MAINTENANCE_INTERVAL + ) + self._maintenance_interval_restored_from_cache = True + + async def _reset_awake(self, last_alive: datetime) -> None: + """Reset node alive state.""" + if self._awake_future is not None and not self._awake_future.done(): + self._awake_future.set_result(True) + # Setup new maintenance timer + self._awake_future = self._loop.create_future() + self._awake_timer_task = self._loop.create_task( + self._awake_timer(), name=f"Node awake timer for {self._mac_in_str}" + ) + + async def _awake_timer(self) -> None: + """Task to monitor to get next awake in time. 
If it is not received in time, mark the device as unavailable.""" + # wait for next maintenance timer, but allow missing one + if self._awake_future is None: + return + timeout_interval = self.maintenance_interval * 60 * 2.1 + try: + await wait_for( + self._awake_future, + timeout=timeout_interval, + ) + except TimeoutError: + # No maintenance awake message within expected time frame + # Mark node as unavailable + if self._available: + last_awake = self._last_awake.get(NodeAwakeResponseType.MAINTENANCE) + _LOGGER.warning( + "No awake message received from %s | last_maintenance_awake=%s | interval=%s (%s) | Marking node as unavailable", + self.name, + last_awake, + self.maintenance_interval, + timeout_interval, + ) + await self._available_update_state(False) + except CancelledError: + pass + self._awake_future = None + + async def _send_tasks(self) -> None: + """Send all tasks in queue.""" + if len(self._send_task_queue) == 0: + return + await self._send_task_lock.acquire() + task_result = await gather(*self._send_task_queue) + if not all(task_result): + _LOGGER.warning( + "Executed %s tasks (result=%s) for %s", + len(self._send_task_queue), + task_result, + self.name, ) + self._send_task_queue = [] + self._send_task_lock.release() - def _wake_up_interval_accepted(self): - """Callback after wake up interval is received and accepted by SED.""" - self._wake_up_interval = self._new_maintenance_interval + async def schedule_task_when_awake( + self, task_fn: Coroutine[Any, Any, bool] + ) -> None: + """Add task to queue to be executed when node is awake.""" + await self._send_task_lock.acquire() + self._send_task_queue.append(task_fn) + self._send_task_lock.release() - # TODO: 20220125 snakestyle name - # pylint: disable=invalid-name - def Configure_SED( + async def sed_configure( # pylint: disable=too-many-arguments self, - stay_active=SED_STAY_ACTIVE, - sleep_for=SED_SLEEP_FOR, - maintenance_interval=SED_MAINTENANCE_INTERVAL, - clock_sync=SED_CLOCK_SYNC, - clock_interval=SED_CLOCK_INTERVAL, - ): - """Reconfigure the sleep/awake settings for a SED send at next awake of SED""" - message = NodeSleepConfigRequest( - self._mac, - stay_active, + awake_duration: int, + sleep_duration: int, + maintenance_interval: int, + clock_sync: bool, + clock_interval: int, + ) -> bool: + """Reconfigure the sleep/awake settings for a SED, to be sent at the next awake of the SED.""" + request = NodeSleepConfigRequest( + self._send, + self._mac_in_bytes, + awake_duration, maintenance_interval, - sleep_for, + sleep_duration, clock_sync, clock_interval, ) - self._queue_request(message, self._wake_up_interval_accepted) - self._new_maintenance_interval = maintenance_interval - _LOGGER.info( - "Queue %s message to be send at next awake of SED node %s", - message.__class__.__name__, - self.mac, + _LOGGER.debug( + "sed_configure | Device %s | awake_duration=%s | clock_interval=%s | clock_sync=%s | maintenance_interval=%s | sleep_duration=%s", + self.name, + awake_duration, + clock_interval, + clock_sync, + maintenance_interval, + sleep_duration, + ) + if (response := await request.send()) is None: + self._new_battery_config = BatteryConfig() + _LOGGER.warning( + "No response from %s to configure sleep settings request", self.name + ) + return False + if response.response_type == NodeResponseType.SED_CONFIG_FAILED: + self._new_battery_config = BatteryConfig() + _LOGGER.warning("Failed to configure sleep settings for %s", self.name) + return False + if response.response_type == NodeResponseType.SED_CONFIG_ACCEPTED: + await
self._sed_configure_update( + awake_duration, + clock_interval, + clock_sync, + maintenance_interval, + sleep_duration, + ) + self._new_battery_config = BatteryConfig() + return True + _LOGGER.warning( + "Unexpected response type %s for %s", + response.response_type, + self.name, + ) + return False + + # pylint: disable=too-many-arguments + async def _sed_configure_update( + self, + awake_duration: int = SED_DEFAULT_AWAKE_DURATION, + clock_interval: int = SED_DEFAULT_CLOCK_INTERVAL, + clock_sync: bool = SED_DEFAULT_CLOCK_SYNC, + maintenance_interval: int = SED_DEFAULT_MAINTENANCE_INTERVAL, + sleep_duration: int = SED_DEFAULT_SLEEP_DURATION, + ) -> None: + """Process result of SED configuration update.""" + self._battery_config = BatteryConfig( + awake_duration=awake_duration, + clock_interval=clock_interval, + clock_sync=clock_sync, + maintenance_interval=maintenance_interval, + sleep_duration=sleep_duration, + ) + self._set_cache(CACHE_MAINTENANCE_INTERVAL, str(maintenance_interval)) + self._set_cache(CACHE_AWAKE_DURATION, str(awake_duration)) + self._set_cache(CACHE_CLOCK_INTERVAL, str(clock_interval)) + self._set_cache(CACHE_SLEEP_DURATION, str(sleep_duration)) + if clock_sync: + self._set_cache(CACHE_CLOCK_SYNC, "True") + else: + self._set_cache(CACHE_CLOCK_SYNC, "False") + await gather( + *[ + self.save_cache(), + self.publish_feature_update_to_subscribers( + NodeFeature.BATTERY, + self._battery_config, + ), + ] ) + + @raise_not_loaded + async def get_state(self, features: tuple[NodeFeature]) -> dict[NodeFeature, Any]: + """Update latest state for given feature.""" + states: dict[NodeFeature, Any] = {} + for feature in features: + if feature not in self._features: + raise NodeError( + f"Update of feature '{feature.name}' is " + + f"not supported for {self.mac}" + ) + if feature == NodeFeature.INFO: + states[NodeFeature.INFO] = await self.node_info_update() + elif feature == NodeFeature.BATTERY: + states[NodeFeature.BATTERY] = self._battery_config + else: + state_result = await super().get_state((feature,)) + states[feature] = state_result[feature] + return states diff --git a/plugwise_usb/nodes/sense.py b/plugwise_usb/nodes/sense.py index 88b75e0a7..e73724ef0 100644 --- a/plugwise_usb/nodes/sense.py +++ b/plugwise_usb/nodes/sense.py @@ -1,84 +1,140 @@ """Plugwise Sense node object.""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable import logging +from typing import Any, Final -from ..constants import ( - FEATURE_HUMIDITY, - FEATURE_PING, - FEATURE_RSSI_IN, - FEATURE_RSSI_OUT, - FEATURE_TEMPERATURE, - SENSE_HUMIDITY_MULTIPLIER, - SENSE_HUMIDITY_OFFSET, - SENSE_TEMPERATURE_MULTIPLIER, - SENSE_TEMPERATURE_OFFSET, -) -from ..messages.responses import SenseReportResponse +from ..api import NodeEvent, NodeFeature +from ..connection import StickController +from ..exceptions import MessageError, NodeError +from ..messages.responses import SENSE_REPORT_ID, PlugwiseResponse, SenseReportResponse from ..nodes.sed import NodeSED +from .helpers import raise_not_loaded +from .helpers.firmware import SENSE_FIRMWARE_SUPPORT _LOGGER = logging.getLogger(__name__) +# Sense calculations +SENSE_HUMIDITY_MULTIPLIER: Final = 125 +SENSE_HUMIDITY_OFFSET: Final = 6 +SENSE_TEMPERATURE_MULTIPLIER: Final = 175.72 +SENSE_TEMPERATURE_OFFSET: Final = 46.85 + +SENSE_FEATURES: Final = ( + NodeFeature.INFO, + NodeFeature.TEMPERATURE, + NodeFeature.HUMIDITY, +) + + class PlugwiseSense(NodeSED): - """provides interface to the Plugwise Sense nodes""" - - def __init__(self, mac, 
address, message_sender): - super().__init__(mac, address, message_sender) - self._features = ( - FEATURE_HUMIDITY["id"], - FEATURE_PING["id"], - FEATURE_RSSI_IN["id"], - FEATURE_RSSI_OUT["id"], - FEATURE_TEMPERATURE["id"], + """Plugwise Sense node.""" + + def __init__( + self, + mac: str, + address: int, + controller: StickController, + loaded_callback: Callable[[NodeEvent, str], Awaitable[None]], + ): + """Initialize Scan Device.""" + super().__init__(mac, address, controller, loaded_callback) + + self._humidity: float | None = None + self._temperature: float | None = None + + self._sense_subscription: Callable[[], None] | None = None + + async def load(self) -> bool: + """Load and activate Sense node features.""" + if self._loaded: + return True + self._node_info.is_battery_powered = True + if self._cache_enabled: + _LOGGER.debug("Load Sense node %s from cache", self._node_info.mac) + if await self._load_from_cache(): + self._loaded = True + self._setup_protocol( + SENSE_FIRMWARE_SUPPORT, + (NodeFeature.INFO, NodeFeature.TEMPERATURE, NodeFeature.HUMIDITY), + ) + if await self.initialize(): + await self._loaded_callback(NodeEvent.LOADED, self.mac) + return True + _LOGGER.debug("Load of Sense node %s failed", self._node_info.mac) + return False + + @raise_not_loaded + async def initialize(self) -> bool: + """Initialize Sense node.""" + if self._initialized: + return True + self._sense_subscription = await self._message_subscribe( + self._sense_report, + self._mac_in_bytes, + (SENSE_REPORT_ID,), ) - self._temperature = None - self._humidity = None - - @property - def humidity(self) -> int: - """Return the current humidity.""" - return self._humidity - - @property - def temperature(self) -> int: - """Return the current temperature.""" - return self._temperature - - def message_for_sense(self, message): - """Process received message""" - if isinstance(message, SenseReportResponse): - self._process_sense_report(message) - else: - _LOGGER.info( - "Unsupported message %s received from %s", - message.__class__.__name__, - self.mac, - ) + return await super().initialize() - def _process_sense_report(self, message): + async def unload(self) -> None: + """Unload node.""" + self._loaded = False + if self._sense_subscription is not None: + self._sense_subscription() + await super().unload() + + async def _sense_report(self, response: PlugwiseResponse) -> bool: """Process sense report message to extract current temperature and humidity values.""" - if message.temperature.value != 65535: - new_temperature = int( - SENSE_TEMPERATURE_MULTIPLIER * (message.temperature.value / 65536) + if not isinstance(response, SenseReportResponse): + raise MessageError( + f"Invalid response message type ({response.__class__.__name__}) received, expected SenseReportResponse" + ) + await self._available_update_state(True, response.timestamp) + if response.temperature.value != 65535: + self._temperature = int( + SENSE_TEMPERATURE_MULTIPLIER * (response.temperature.value / 65536) - SENSE_TEMPERATURE_OFFSET ) - if self._temperature != new_temperature: - self._temperature = new_temperature - _LOGGER.debug( - "Sense report received from %s with new temperature level of %s", - self.mac, - str(self._temperature), - ) - self.do_callback(FEATURE_TEMPERATURE["id"]) - if message.humidity.value != 65535: - new_humidity = int( - SENSE_HUMIDITY_MULTIPLIER * (message.humidity.value / 65536) + await self.publish_feature_update_to_subscribers( + NodeFeature.TEMPERATURE, self._temperature + ) + if response.humidity.value != 65535: + 
self._humidity = int( + SENSE_HUMIDITY_MULTIPLIER * (response.humidity.value / 65536) - SENSE_HUMIDITY_OFFSET ) - if self._humidity != new_humidity: - self._humidity = new_humidity - _LOGGER.debug( - "Sense report received from %s with new humidity level of %s", - self.mac, - str(self._humidity), + await self.publish_feature_update_to_subscribers( + NodeFeature.HUMIDITY, self._humidity + ) + return True + return False + + @raise_not_loaded + async def get_state(self, features: tuple[NodeFeature]) -> dict[NodeFeature, Any]: + """Update latest state for given feature.""" + states: dict[NodeFeature, Any] = {} + for feature in features: + _LOGGER.debug( + "Updating node %s - feature '%s'", + self._node_info.mac, + feature, + ) + if feature not in self._features: + raise NodeError( + f"Update of feature '{feature.name}' is not supported for {self.mac}" ) - self.do_callback(FEATURE_HUMIDITY["id"]) + if feature == NodeFeature.TEMPERATURE: + states[NodeFeature.TEMPERATURE] = self._temperature + elif feature == NodeFeature.HUMIDITY: + states[NodeFeature.HUMIDITY] = self._humidity + elif feature == NodeFeature.PING: + states[NodeFeature.PING] = await self.ping_update() + else: + state_result = await super().get_state((feature,)) + states[feature] = state_result[feature] + if NodeFeature.AVAILABLE not in states: + states[NodeFeature.AVAILABLE] = self.available_state + return states diff --git a/plugwise_usb/nodes/stealth.py b/plugwise_usb/nodes/stealth.py index 102e309c8..33be3907a 100644 --- a/plugwise_usb/nodes/stealth.py +++ b/plugwise_usb/nodes/stealth.py @@ -3,4 +3,4 @@ class PlugwiseStealth(PlugwiseCircle): - """provides interface to the Plugwise Stealth nodes""" + """provides interface to the Plugwise Stealth nodes.""" diff --git a/plugwise_usb/nodes/switch.py b/plugwise_usb/nodes/switch.py index 8bde56176..bf84fb10a 100644 --- a/plugwise_usb/nodes/switch.py +++ b/plugwise_usb/nodes/switch.py @@ -1,57 +1,162 @@ """Plugwise switch node object.""" + +from __future__ import annotations + +from asyncio import gather +from collections.abc import Awaitable, Callable +from datetime import datetime import logging +from typing import Any, Final -from ..constants import FEATURE_PING, FEATURE_RSSI_IN, FEATURE_RSSI_OUT, FEATURE_SWITCH -from ..messages.responses import NodeSwitchGroupResponse +from ..api import NodeEvent, NodeFeature +from ..connection import StickController +from ..exceptions import MessageError, NodeError +from ..messages.responses import ( + NODE_SWITCH_GROUP_ID, + NodeSwitchGroupResponse, + PlugwiseResponse, +) from ..nodes.sed import NodeSED +from .helpers import raise_not_loaded +from .helpers.firmware import SWITCH_FIRMWARE_SUPPORT _LOGGER = logging.getLogger(__name__) +CACHE_SWITCH_STATE: Final = "switch_state" +CACHE_SWITCH_TIMESTAMP: Final = "switch_timestamp" + class PlugwiseSwitch(NodeSED): - """provides interface to the Plugwise Switch nodes""" - - def __init__(self, mac, address, message_sender): - super().__init__(mac, address, message_sender) - self._features = ( - FEATURE_PING["id"], - FEATURE_RSSI_IN["id"], - FEATURE_RSSI_OUT["id"], - FEATURE_SWITCH["id"], + """Plugwise Switch node.""" + + def __init__( + self, + mac: str, + address: int, + controller: StickController, + loaded_callback: Callable[[NodeEvent, str], Awaitable[None]], + ): + """Initialize Scan Device.""" + super().__init__(mac, address, controller, loaded_callback) + self._switch_subscription: Callable[[], None] | None = None + self._switch_state: bool | None = None + self._switch: bool | None = None + + 
async def load(self) -> bool: + """Load and activate Switch node features.""" + if self._loaded: + return True + if self._cache_enabled: + _LOGGER.debug("Load Switch node %s from cache", self._node_info.mac) + await self._load_from_cache() + else: + self._load_defaults() + self._loaded = True + self._setup_protocol( + SWITCH_FIRMWARE_SUPPORT, + (NodeFeature.BATTERY, NodeFeature.INFO, NodeFeature.PING, NodeFeature.SWITCH), + ) + if await self.initialize(): + await self._loaded_callback(NodeEvent.LOADED, self.mac) + return True + _LOGGER.debug("Load of Switch node %s failed", self._node_info.mac) + return False + + @raise_not_loaded + async def initialize(self) -> bool: + """Initialize Switch node.""" + if self._initialized: + return True + self._switch_subscription = await self._message_subscribe( + self._switch_group, + self._mac_in_bytes, + (NODE_SWITCH_GROUP_ID,), ) - self._switch_state = False + return await super().initialize() + + async def unload(self) -> None: + """Unload node.""" + if self._switch_subscription is not None: + self._switch_subscription() + await super().unload() + + # region Properties @property + @raise_not_loaded def switch(self) -> bool: - """Return the last known switch state""" - return self._switch_state + """Current state of switch.""" + return bool(self._switch_state) - def message_for_switch(self, message): - """Process received message""" - if isinstance(message, NodeSwitchGroupResponse): - _LOGGER.debug( - "Switch group request %s received from %s for group id %s", - str(message.power_state), - self.mac, - str(message.group), + # endregion + + async def _switch_group(self, response: PlugwiseResponse) -> bool: + """Switch group request from Switch.""" + if not isinstance(response, NodeSwitchGroupResponse): + raise MessageError( + f"Invalid response message type ({response.__class__.__name__}) received, expected NodeSwitchGroupResponse" ) - self._process_switch_group(message) + await gather( + self._available_update_state(True, response.timestamp), + self._switch_state_update(response.switch_state, response.timestamp) + ) + return True - def _process_switch_group(self, message): - """Switch group request from Scan""" - if message.power_state == 0: - # turn off => clear motion - if self._switch_state: - self._switch_state = False - self.do_callback(FEATURE_SWITCH["id"]) - elif message.power_state == 1: - # turn on => motion - if not self._switch_state: + async def _switch_state_update( + self, switch_state: bool, timestamp: datetime + ) -> None: + """Process switch state update.""" + _LOGGER.debug( + "_switch_state_update for %s: %s -> %s", + self.name, + self._switch_state, + switch_state, + ) + state_update = False + # Switch on + if switch_state: + self._set_cache(CACHE_SWITCH_STATE, "True") + if self._switch_state is None or not self._switch: self._switch_state = True - self.do_callback(FEATURE_SWITCH["id"]) + state_update = True else: + # Switch off + self._set_cache(CACHE_SWITCH_STATE, "False") + if self._switch is None or self._switch: + self._switch_state = False + state_update = True + self._set_cache(CACHE_SWITCH_TIMESTAMP, timestamp) + if state_update: + self._switch = switch_state + await gather( + *[ + self.publish_feature_update_to_subscribers( + NodeFeature.SWITCH, self._switch_state + ), + self.save_cache(), + ] + ) + + @raise_not_loaded + async def get_state(self, features: tuple[NodeFeature]) -> dict[NodeFeature, Any]: + """Update latest state for given feature.""" + states: dict[NodeFeature, Any] = {} + for feature in features:
_LOGGER.debug( - "Unknown power_state (%s) received from %s", - str(message.power_state), - self.mac, + "Updating node %s - feature '%s'", + self._node_info.mac, + feature, ) + if feature not in self._features: + raise NodeError( + f"Update of feature '{feature.name}' is " + + f"not supported for {self.mac}" + ) + if feature == NodeFeature.SWITCH: + states[NodeFeature.SWITCH] = self._switch_state + else: + state_result = await super().get_state((feature,)) + states[feature] = state_result[feature] + if NodeFeature.AVAILABLE not in states: + states[NodeFeature.AVAILABLE] = self.available_state + return states diff --git a/plugwise_usb/parser.py b/plugwise_usb/parser.py deleted file mode 100644 index 2e0fb6e2a..000000000 --- a/plugwise_usb/parser.py +++ /dev/null @@ -1,137 +0,0 @@ -"""Data parser for USB-Stick - -The parser will: -- buffer receiving data -- filter out received zigbee routing data -- collect message data by detecting header and footer -- detect message type based on message ID or fixed sequence ID -- validate received data on checksum -- decode collected data into a response message instance -- pass over received messages to message_processor (controller.py) - -""" - -import logging - -from .constants import MESSAGE_FOOTER, MESSAGE_HEADER -from .exceptions import ( - InvalidMessageChecksum, - InvalidMessageFooter, - InvalidMessageHeader, - InvalidMessageLength, -) -from .messages.responses import get_message_response - -_LOGGER = logging.getLogger(__name__) - - -class PlugwiseParser: - """Transform Plugwise message from wire format to response message object.""" - - def __init__(self, message_processor): - self.message_processor = message_processor - self._buffer = bytes([]) - self._parsing = False - self._message = None - - def feed(self, data): - """Add new incoming data to buffer and try to process""" - _LOGGER.debug("Feed data: %s", str(data)) - self._buffer += data - if len(self._buffer) >= 8: - if not self._parsing: - self.parse_data() - - def next_message(self, message): - """Process next packet if present""" - try: - self.message_processor(message) - # TODO: narrow exception - except Exception as err: # pylint: disable=broad-except - _LOGGER.error( - "Error while processing %s : %s", - self._message.__class__.__name__, - err, - ) - _LOGGER.error(err, exc_info=True) - - def parse_data(self): - """Process next set of packet data""" - _LOGGER.debug("Parse data: %s ", str(self._buffer)) - if not self._parsing: - self._parsing = True - - # Lookup header of message in buffer - _LOGGER.debug( - "Lookup message header (%s) in (%s)", - str(MESSAGE_HEADER), - str(self._buffer), - ) - if (header_index := self._buffer.find(MESSAGE_HEADER)) == -1: - _LOGGER.debug("No valid message header found yet") - else: - _LOGGER.debug( - "Valid message header found at index %s", str(header_index) - ) - self._buffer = self._buffer[header_index:] - - # Header available, lookup footer of message in buffer - _LOGGER.debug( - "Lookup message footer (%s) in (%s)", - str(MESSAGE_FOOTER), - str(self._buffer), - ) - if (footer_index := self._buffer.find(MESSAGE_FOOTER)) == -1: - _LOGGER.debug("No valid message footer found yet") - else: - _LOGGER.debug( - "Valid message footer found at index %s", str(footer_index) - ) - self._message = get_message_response( - self._buffer[4:8], footer_index, self._buffer[8:12] - ) - if self._message: - try: - self._message.deserialize(self._buffer[: footer_index + 2]) - except ( - InvalidMessageChecksum, - InvalidMessageFooter, - InvalidMessageHeader, - 
InvalidMessageLength, - ) as err: - _LOGGER.warning(err) - # TODO: narrow exception - except Exception as err: # pylint: disable=broad-except - _LOGGER.error( - "Failed to parse %s message (%s)", - self._message.__class__.__name__, - str(self._buffer[: footer_index + 2]), - ) - _LOGGER.error(err) - else: - # Submit message - self.next_message(self._message) - # Parse remaining buffer - self.reset_parser(self._buffer[footer_index + 2 :]) - else: - # skip this message, so remove header from buffer - _LOGGER.error( - "Skip unknown message %s", - str(self._buffer[: footer_index + 2]), - ) - self.reset_parser(self._buffer[6:]) - self._parsing = False - else: - _LOGGER.debug("Skip parsing session") - - def reset_parser(self, new_buffer=bytes([])): - _LOGGER.debug("Reset parser : %s", new_buffer) - if new_buffer == b"\x83": - # Skip additional byte sometimes appended after footer - self._buffer = bytes([]) - else: - self._buffer = new_buffer - self._message = None - self._parsing = False - if len(self._buffer) > 0: - self.parse_data() diff --git a/plugwise_usb/util.py b/plugwise_usb/util.py deleted file mode 100644 index 194875be3..000000000 --- a/plugwise_usb/util.py +++ /dev/null @@ -1,286 +0,0 @@ -"""Use of this source code is governed by the MIT license found in the LICENSE file. - -Plugwise protocol helpers -""" -from __future__ import annotations - -import binascii -import datetime -import re -import struct - -import crcmod - -from .constants import HW_MODELS, LOGADDR_OFFSET, PLUGWISE_EPOCH, UTF8_DECODE - -crc_fun = crcmod.mkCrcFun(0x11021, rev=False, initCrc=0x0000, xorOut=0x0000) - - -# NOTE: this function version_to_model is shared between Smile and USB -def version_to_model(version: str) -> str: - """Translate hardware_version to device type.""" - model = HW_MODELS.get(version) - if model is None: - model = HW_MODELS.get(version[4:10]) - if model is None: - # Try again with reversed order - model = HW_MODELS.get(version[-2:] + version[-4:-2] + version[-6:-4]) - - return model if model is not None else "Unknown" - - -def validate_mac(mac: str) -> bool: - if not re.match("^[A-F0-9]+$", mac): - return False - try: - _ = int(mac, 16) - except ValueError: - return False - return True - - -def inc_seq_id(seq_id: str | None, value: int = 1) -> bytearray | bytes: - """Increment sequence id by value - - :return: 4 bytes - """ - if seq_id is None: - return b"0000" - # Max seq_id = b'FFFB' - # b'FFFC' reserved for message - # b'FFFD' reserved for 'NodeJoinAckResponse' message - # b'FFFE' reserved for 'NodeSwitchGroupResponse' message - # b'FFFF' reserved for 'NodeAwakeResponse' message - if (temp_int := int(seq_id, 16) + value) >= 65532: - temp_int = 0 - temp_str = str(hex(temp_int)).lstrip("0x").upper() - while len(temp_str) < 4: - temp_str = "0" + temp_str - return temp_str.encode() - - -# octals (and hex) type as int according to https://docs.python.org/3/library/stdtypes.html -def uint_to_int(val: int, octals: int) -> int: - """Compute the 2's compliment of int value val for negative values""" - bits = octals << 2 - if (val & (1 << (bits - 1))) != 0: - val = val - (1 << bits) - return val - - -# octals (and hex) type as int according to https://docs.python.org/3/library/stdtypes.html -def int_to_uint(val: int, octals: int) -> int: - """Compute the 2's compliment of int value val for negative values""" - bits = octals << 2 - if val < 0: - val = val + (1 << bits) - return val - - -class BaseType: - def __init__(self, value, length) -> None: # type: ignore[no-untyped-def] - self.value = value - 
self.length = length - - def serialize(self): # type: ignore[no-untyped-def] - return bytes(self.value, UTF8_DECODE) - - def deserialize(self, val): # type: ignore[no-untyped-def] - self.value = val - - def __len__(self): # type: ignore[no-untyped-def] - return self.length - - -class CompositeType: - def __init__(self) -> None: - self.contents: list = [] - # datetime because of DateTime and Time and RealClockDate - self.value: datetime.datetime | datetime.time | datetime.date | None = None - - def serialize(self): # type: ignore[no-untyped-def] - return b"".join(a.serialize() for a in self.contents) - - def deserialize(self, val): # type: ignore[no-untyped-def] - for content in self.contents: - myval = val[: len(content)] - content.deserialize(myval) - val = val[len(myval) :] - return val - - def __len__(self): # type: ignore[no-untyped-def] - return sum(len(x) for x in self.contents) - - -class String(BaseType): - pass - - -class Int(BaseType): - def __init__(self, value, length=2, negative: bool = True) -> None: # type: ignore[no-untyped-def] - super().__init__(value, length) - self.negative = negative - - def serialize(self): # type: ignore[no-untyped-def] - fmt = "%%0%dX" % self.length - return bytes(fmt % self.value, UTF8_DECODE) - - def deserialize(self, val): # type: ignore[no-untyped-def] - self.value = int(val, 16) - if self.negative: - mask = 1 << (self.length * 4 - 1) - self.value = -(self.value & mask) + (self.value & ~mask) - - -class SInt(BaseType): - def __init__(self, value, length=2) -> None: # type: ignore[no-untyped-def] - super().__init__(value, length) - - @staticmethod - def negative(val, octals): # type: ignore[no-untyped-def] - """Compute the 2's compliment of int value val for negative values""" - bits = octals << 2 - if (val & (1 << (bits - 1))) != 0: - val = val - (1 << bits) - return val - - def serialize(self): # type: ignore[no-untyped-def] - fmt = "%%0%dX" % self.length - return fmt % int_to_uint(self.value, self.length) - - def deserialize(self, val): # type: ignore[no-untyped-def] - # TODO: negative is not initialized! 
20220405 - self.value = self.negative(int(val, 16), self.length) # type: ignore [no-untyped-call] - - -class UnixTimestamp(Int): - def __init__(self, value, length=8) -> None: # type: ignore[no-untyped-def] - Int.__init__(self, value, length, False) - - def deserialize(self, val): # type: ignore[no-untyped-def] - # TODO: Solution, fix Int 20220405 - Int.deserialize(self, val) # type: ignore[no-untyped-call] - self.value = datetime.datetime.fromtimestamp(self.value) - - -class Year2k(Int): - """year value that is offset from the year 2000""" - - def deserialize(self, val): # type: ignore[no-untyped-def] - # TODO: Solution, fix Int 20220405 - Int.deserialize(self, val) # type: ignore[no-untyped-call] - self.value += PLUGWISE_EPOCH - - -class DateTime(CompositeType): - """datetime value as used in the general info response - format is: YYMMmmmm - where year is offset value from the epoch which is Y2K - and last four bytes are offset from the beginning of the month in minutes - """ - - def __init__(self, year: int = 0, month: int = 1, minutes: int = 0) -> None: - CompositeType.__init__(self) - self.year = Year2k(year - PLUGWISE_EPOCH, 2) - self.month = Int(month, 2, False) - self.minutes = Int(minutes, 4, False) - self.contents += [self.year, self.month, self.minutes] - - def deserialize(self, val: int) -> None: - # TODO: Solution, fix Int 20220405 - CompositeType.deserialize(self, val) # type: ignore[no-untyped-call] - if self.minutes.value == 65535: - self.value = None - else: - self.value = datetime.datetime( - year=self.year.value, month=self.month.value, day=1 - ) + datetime.timedelta(minutes=self.minutes.value) - - -class Time(CompositeType): - """time value as used in the clock info response""" - - def __init__(self, hour: int = 0, minute: int = 0, second: int = 0) -> None: - CompositeType.__init__(self) - self.hour = Int(hour, 2, False) - self.minute = Int(minute, 2, False) - self.second = Int(second, 2, False) - self.contents += [self.hour, self.minute, self.second] - - def deserialize(self, val) -> None: # type: ignore[no-untyped-def] - # TODO: Solution, fix Int 20220405 - CompositeType.deserialize(self, val) # type: ignore[no-untyped-call] - self.value = datetime.time( - self.hour.value, self.minute.value, self.second.value - ) - - -class IntDec(BaseType): - def __init__(self, value, length=2) -> None: # type: ignore[no-untyped-def] - super().__init__(value, length) - - def serialize(self): # type: ignore[no-untyped-def] - fmt = "%%0%dd" % self.length - return bytes(fmt % self.value, UTF8_DECODE) - - def deserialize(self, val): # type: ignore[no-untyped-def] - self.value = val.decode(UTF8_DECODE) - - -class RealClockTime(CompositeType): - """time value as used in the realtime clock info response""" - - def __init__(self, hour: int = 0, minute: int = 0, second: int = 0) -> None: - CompositeType.__init__(self) - self.hour = IntDec(hour, 2) - self.minute = IntDec(minute, 2) - self.second = IntDec(second, 2) - self.contents += [self.second, self.minute, self.hour] - - def deserialize(self, val): # type: ignore[no-untyped-def] - # TODO: Solution, fix Int 20220405 - CompositeType.deserialize(self, val) # type: ignore[no-untyped-call] - self.value = datetime.time( - int(self.hour.value), - int(self.minute.value), - int(self.second.value), - ) - - -class RealClockDate(CompositeType): - """date value as used in the realtime clock info response""" - - def __init__(self, day: int = 0, month: int = 0, year: int = 0) -> None: - CompositeType.__init__(self) - self.day = IntDec(day, 2) - 
self.month = IntDec(month, 2) - self.year = IntDec(year - PLUGWISE_EPOCH, 2) - self.contents += [self.day, self.month, self.year] - - def deserialize(self, val): # type: ignore[no-untyped-def] - # TODO: Solution, fix Int 20220405 - CompositeType.deserialize(self, val) # type: ignore[no-untyped-call] - self.value = datetime.date( - int(self.year.value) + PLUGWISE_EPOCH, - int(self.month.value), - int(self.day.value), - ) - - -class Float(BaseType): - def __init__(self, value, length=4): # type: ignore[no-untyped-def] - super().__init__(value, length) - - def deserialize(self, val): # type: ignore[no-untyped-def] - hexval = binascii.unhexlify(val) - self.value = struct.unpack("!f", hexval)[0] - - -class LogAddr(Int): - def serialize(self): # type: ignore[no-untyped-def] - return bytes("%08X" % ((self.value * 32) + LOGADDR_OFFSET), UTF8_DECODE) - - def deserialize(self, val): # type: ignore[no-untyped-def] - # TODO: Solution, fix Int 20220405 - Int.deserialize(self, val) # type: ignore[no-untyped-call] - self.value = (self.value - LOGADDR_OFFSET) // 32 diff --git a/pyproject.toml b/pyproject.toml index 4cbbe0396..c6b80fa65 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "plugwise_usb" -version = "0.31.4a0" +version = "v0.40.0a52" license = {file = "LICENSE"} description = "Plugwise USB (Stick) module for Python 3." readme = "README.md" @@ -14,7 +14,6 @@ classifiers = [ "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", - "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Topic :: Home Automation", @@ -29,15 +28,11 @@ maintainers = [ { name = "CoMPaTech" }, { name = "dirixmjm" } ] -requires-python = ">=3.10.0" +requires-python = ">=3.11.0" dependencies = [ - "aiohttp", - "async_timeout", + "pyserial-asyncio-fast", + "aiofiles", "crcmod", - "defusedxml", - "munch", - "pyserial", - "python-dateutil", "semver", ] @@ -151,6 +146,7 @@ disable = [ "wrong-import-order", ] # for now (20201031) added the below while we are codemerging/-improving +# too-many-positional-arguments # missing-class-docstring # missing-function-docstring # missing-module-docstring @@ -223,7 +219,7 @@ omit= [ ] [tool.ruff] -target-version = "py312" +target-version = "py313" lint.select = [ "B002", # Python does not support the unary prefix increment @@ -310,6 +306,7 @@ lint.ignore = [ "PLW2901", # Outer {outer_kind} variable {name} overwritten by inner {inner_kind} target "UP006", # keep type annotation style as is "UP007", # keep type annotation style as is + "UP031" # Ignored due to performance: https://github.com/charliermarsh/ruff/issues/2923 #"UP038", # Use `X | Y` in `isinstance` call instead of `(X, Y)` ] diff --git a/requirements_test.txt b/requirements_test.txt index db933ea38..37a5e6d77 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -4,3 +4,6 @@ pytest-asyncio radon==6.0.1 types-python-dateutil +pyserial-asyncio-fast +aiofiles +freezegun \ No newline at end of file diff --git a/scripts/python-venv.sh b/scripts/python-venv.sh index a791ca28b..655803ccc 100755 --- a/scripts/python-venv.sh +++ b/scripts/python-venv.sh @@ -1,7 +1,7 @@ #!/usr/bin/env bash set -eu -pyversions=(3.12 3.11 3.10) +pyversions=( 3.13 ) my_path=$(git rev-parse --show-toplevel) my_venv=${my_path}/venv diff --git a/tests/bandit.yaml b/tests/bandit.yaml index 46566cc98..4a8cda726 100644 --- a/tests/bandit.yaml +++ 
b/tests/bandit.yaml @@ -12,7 +12,6 @@ tests: - B317 - B318 - B319 - - B320 - B601 - B602 - B604 diff --git a/tests/stick_test_data.py b/tests/stick_test_data.py new file mode 100644 index 000000000..8c7c57293 --- /dev/null +++ b/tests/stick_test_data.py @@ -0,0 +1,1375 @@ +from datetime import UTC, datetime, timedelta +import importlib + +pw_constants = importlib.import_module("plugwise_usb.constants") + +# test using utc timezone +utc_now = datetime.now(tz=UTC).replace(tzinfo=UTC) + + +# generate energy log timestamps with fixed hour timestamp used in tests +hour_timestamp = utc_now.replace(minute=0, second=0, microsecond=0) + +LOG_TIMESTAMPS = {} +_one_hour = timedelta(hours=1) +for x in range(168): + delta_month = hour_timestamp - hour_timestamp.replace(day=1, hour=0) + LOG_TIMESTAMPS[x] = ( + bytes(("%%0%dX" % 2) % (hour_timestamp.year - 2000), pw_constants.UTF8) + + bytes(("%%0%dX" % 2) % hour_timestamp.month, pw_constants.UTF8) + + bytes( + ("%%0%dX" % 4) + % int((delta_month.days * 1440) + (delta_month.seconds / 60)), + pw_constants.UTF8, + ) + ) + hour_timestamp -= _one_hour + + +RESPONSE_MESSAGES = { + b"\x05\x05\x03\x03000AB43C\r\n": ( + "STICK INIT", + b"000000C1", # Success ack + b"0011" # msg_id + + b"0123456789012345" # stick mac + + b"00" # unknown1 + + b"01" # network_is_online + + b"0098765432101234" # circle_plus_mac + + b"4321" # network_id + + b"FF", # unknown2 + ), + b"\x05\x05\x03\x0300230123456789012345A0EC\r\n": ( + "Node Info of stick 0123456789012345", + b"000000C1", # Success ack + b"0024" # msg_id + + b"0123456789012345" # mac + + b"00000000" # datetime + + b"00000000" # log address 0 + + b"00" # relay + + b"80" # hz + + b"653907008512" # hw_ver + + b"4E0843A9" # fw_ver + + b"00", # node_type (Stick) + ), + b"\x05\x05\x03\x03002300987654321012341AE2\r\n": ( + "Node Info of network controller 0098765432101234", + b"000000C1", # Success ack + b"0024" # msg_id + + b"0098765432101234" # mac + + b"22026A68" # datetime + + b"00044280" # log address 20 + + b"01" # relay + + b"01" # hz + + b"000000730007" # hw_ver + + b"4E0843A9" # fw_ver + + b"01", # node_type (Circle+) + ), + b"\x05\x05\x03\x03000D0098765432101234C208\r\n": ( + "ping reply for 0098765432101234", + b"000000C1", # Success ack + b"000E" + + b"0098765432101234" + + b"45" # rssi in + + b"46" # rssi out + + b"0432", # roundtrip + ), + b"\x05\x05\x03\x030018009876543210123400BEF9\r\n": ( + "SCAN 00", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"1111111111111111" + b"00", + ), + b"\x05\x05\x03\x030018009876543210123401AED8\r\n": ( + "SCAN 01", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"2222222222222222" + b"01", + ), + b"\x05\x05\x03\x0300180098765432101234029EBB\r\n": ( + "SCAN 02", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"3333333333333333" + b"02", + ), + b"\x05\x05\x03\x0300180098765432101234038E9A\r\n": ( + "SCAN 03", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"4444444444444444" + b"03", + ), + b"\x05\x05\x03\x030018009876543210123404FE7D\r\n": ( + "SCAN 04", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"5555555555555555" + b"04", + ), + b"\x05\x05\x03\x030018009876543210123405EE5C\r\n": ( + "SCAN 05", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"6666666666666666" + b"05", + ), + b"\x05\x05\x03\x030018009876543210123406DE3F\r\n": ( + "SCAN 06", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"7777777777777777" + b"06", + ), + 
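The LOG_TIMESTAMPS loop above encodes each hourly log timestamp the way the energy-log responses report it: two hex characters for the year offset from 2000, two for the month, and four for the number of minutes elapsed since the start of that month (the same YYMMmmmm layout described in the DateTime docstring earlier in this diff). A minimal decoding sketch; decode_log_timestamp is an illustrative helper name, not part of the library:

    from datetime import UTC, datetime, timedelta

    def decode_log_timestamp(raw: bytes) -> datetime:
        """Decode a YYMMmmmm energy-log timestamp (hex ASCII, UTC)."""
        year = 2000 + int(raw[0:2], 16)
        month = int(raw[2:4], 16)
        minutes = int(raw[4:8], 16)
        return datetime(year, month, 1, tzinfo=UTC) + timedelta(minutes=minutes)

    # Example: b"160207F8" -> year 2022 (0x16), February (0x02),
    # 0x07F8 = 2040 minutes past Feb 1 00:00 UTC -> 2022-02-02 10:00:00 UTC.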
b"\x05\x05\x03\x030018009876543210123407CE1E\r\n": ( + "SWITCH 01", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"8888888888888888" + b"07", + ), + b"\x05\x05\x03\x0300180098765432101234083FF1\r\n": ( + "SCAN 08", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"08", + ), + b"\x05\x05\x03\x0300180098765432101234092FD0\r\n": ( + "SCAN 09", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"09", + ), + b"\x05\x05\x03\x03001800987654321012340AD04F\r\n": ( + "SCAN 10", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"0A", + ), + b"\x05\x05\x03\x03001800987654321012340BE02C\r\n": ( + "SCAN 11", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"0B", + ), + b"\x05\x05\x03\x03001800987654321012340CF00D\r\n": ( + "SCAN 12", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"0C", + ), + b"\x05\x05\x03\x03001800987654321012340D80EA\r\n": ( + "SCAN 13", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"0D", + ), + b"\x05\x05\x03\x03001800987654321012340E90CB\r\n": ( + "SCAN 14", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"0E", + ), + b"\x05\x05\x03\x03001800987654321012340FA0A8\r\n": ( + "SCAN 15", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"0F", + ), + b"\x05\x05\x03\x0300180098765432101234108DC8\r\n": ( + "SCAN 16", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"10", + ), + b"\x05\x05\x03\x0300180098765432101234119DE9\r\n": ( + "SCAN 17", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"11", + ), + b"\x05\x05\x03\x030018009876543210123412AD8A\r\n": ( + "SCAN 18", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"12", + ), + b"\x05\x05\x03\x030018009876543210123413BDAB\r\n": ( + "SCAN 19", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"13", + ), + b"\x05\x05\x03\x030018009876543210123414CD4C\r\n": ( + "SCAN 20", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"14", + ), + b"\x05\x05\x03\x030018009876543210123415DD6D\r\n": ( + "SCAN 21", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"15", + ), + b"\x05\x05\x03\x030018009876543210123416ED0E\r\n": ( + "SCAN 22", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"16", + ), + b"\x05\x05\x03\x030018009876543210123417FD2F\r\n": ( + "SCAN 23", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"17", + ), + b"\x05\x05\x03\x0300180098765432101234180CC0\r\n": ( + "SCAN 24", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"18", + ), + b"\x05\x05\x03\x0300180098765432101234191CE1\r\n": ( + "SCAN 25", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"19", + ), + b"\x05\x05\x03\x03001800987654321012341AE37E\r\n": ( + "SCAN 26", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"1A", + ), + b"\x05\x05\x03\x03001800987654321012341BD31D\r\n": ( + "SCAN 27", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"1B", + ), + b"\x05\x05\x03\x03001800987654321012341CC33C\r\n": ( + "SCAN 28", + b"000000C1", # 
Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"1C", + ), + b"\x05\x05\x03\x03001800987654321012341DB3DB\r\n": ( + "SCAN 29", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"1D", + ), + b"\x05\x05\x03\x03001800987654321012341EA3FA\r\n": ( + "SCAN 30", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"1E", + ), + b"\x05\x05\x03\x03001800987654321012341F9399\r\n": ( + "SCAN 31", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"1F", + ), + b"\x05\x05\x03\x030018009876543210123420D89B\r\n": ( + "SCAN 32", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"20", + ), + b"\x05\x05\x03\x030018009876543210123421C8BA\r\n": ( + "SCAN 33", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"21", + ), + b"\x05\x05\x03\x030018009876543210123422F8D9\r\n": ( + "SCAN 34", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"22", + ), + b"\x05\x05\x03\x030018009876543210123423E8F8\r\n": ( + "SCAN 35", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"23", + ), + b"\x05\x05\x03\x030018009876543210123424981F\r\n": ( + "SCAN 36", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"24", + ), + b"\x05\x05\x03\x030018009876543210123425883E\r\n": ( + "SCAN 37", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"25", + ), + b"\x05\x05\x03\x030018009876543210123426B85D\r\n": ( + "SCAN 38", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"26", + ), + b"\x05\x05\x03\x030018009876543210123427A87C\r\n": ( + "SCAN 39", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"27", + ), + b"\x05\x05\x03\x0300180098765432101234285993\r\n": ( + "SCAN 40", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"28", + ), + b"\x05\x05\x03\x03001800987654321012342949B2\r\n": ( + "SCAN 41", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"29", + ), + b"\x05\x05\x03\x03001800987654321012342AB62D\r\n": ( + "SCAN 42", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"2A", + ), + b"\x05\x05\x03\x03001800987654321012342B864E\r\n": ( + "SCAN 43", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"2B", + ), + b"\x05\x05\x03\x03001800987654321012342C966F\r\n": ( + "SCAN 44", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"2C", + ), + b"\x05\x05\x03\x03001800987654321012342DE688\r\n": ( + "SCAN 45", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"2D", + ), + b"\x05\x05\x03\x03001800987654321012342EF6A9\r\n": ( + "SCAN 46", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"2E", + ), + b"\x05\x05\x03\x03001800987654321012342FC6CA\r\n": ( + "SCAN 47", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"2F", + ), + b"\x05\x05\x03\x030018009876543210123430EBAA\r\n": ( + "SCAN 48", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"30", + ), + b"\x05\x05\x03\x030018009876543210123431FB8B\r\n": ( + "SCAN 49", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"31", + ), + 
b"\x05\x05\x03\x030018009876543210123432CBE8\r\n": ( + "SCAN 50", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"32", + ), + b"\x05\x05\x03\x030018009876543210123433DBC9\r\n": ( + "SCAN 51", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"33", + ), + b"\x05\x05\x03\x030018009876543210123434AB2E\r\n": ( + "SCAN 52", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"34", + ), + b"\x05\x05\x03\x030018009876543210123435BB0F\r\n": ( + "SCAN 53", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"35", + ), + b"\x05\x05\x03\x0300180098765432101234368B6C\r\n": ( + "SCAN 54", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"36", + ), + b"\x05\x05\x03\x0300180098765432101234379B4D\r\n": ( + "SCAN 55", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"37", + ), + b"\x05\x05\x03\x0300180098765432101234386AA2\r\n": ( + "SCAN 56", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"38", + ), + b"\x05\x05\x03\x0300180098765432101234397A83\r\n": ( + "SCAN 57", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"39", + ), + b"\x05\x05\x03\x03001800987654321012343A851C\r\n": ( + "SCAN 58", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"3A", + ), + b"\x05\x05\x03\x03001800987654321012343BB57F\r\n": ( + "SCAN 59", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"3B", + ), + b"\x05\x05\x03\x03001800987654321012343CA55E\r\n": ( + "SCAN 60", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"3C", + ), + b"\x05\x05\x03\x03001800987654321012343DD5B9\r\n": ( + "SCAN 61", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"3D", + ), + b"\x05\x05\x03\x03001800987654321012343EC598\r\n": ( + "SCAN 62", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"3E", + ), + b"\x05\x05\x03\x03001800987654321012343FF5FB\r\n": ( + "SCAN 63", + b"000000C1", # Success ack + b"0019" + b"0098765432101234" + b"FFFFFFFFFFFFFFFF" + b"3F", + ), + b"\x05\x05\x03\x03000D11111111111111110B1A\r\n": ( + "ping reply for 1111111111111111", + b"000000C1", # Success ack + b"000E" + + b"1111111111111111" + + b"42" # rssi in 66 + + b"45" # rssi out 69 + + b"0237", # roundtrip 567 + ), + b"\x05\x05\x03\x03000D222222222222222234E3\r\n": ( + "ping reply for 2222222222222222", + b"000000C1", # Success ack + b"000E" + + b"2222222222222222" # mac + + b"44" # rssi in + + b"55" # rssi out + + b"4321", # roundtrip + ), + b"\x05\x05\x03\x03000D333333333333333321B4\r\n": ( + "ping reply for 3333333333333333", + b"000000C1", # Success ack + b"000E" + + b"3333333333333333" # mac + + b"44" # rssi in + + b"55" # rssi out + + b"4321", # roundtrip + ), + b"\x05\x05\x03\x03000D44444444444444444B11\r\n": ( + "ping reply for 4444444444444444", + b"000000C1", # Success ack + b"000E" # msg_id + + b"4444444444444444" # mac + + b"33" # rssi in + + b"44" # rssi out + + b"1234", # roundtrip + ), + b"\x05\x05\x03\x03000D55555555555555555E46\r\n": ( + "ping timeout for 5555555555555555", + b"000000E1", # Timeout + None, + ), + b"\x05\x05\x03\x03000D666666666666666661BF\r\n": ( + "ping timeout for 6666666666666666", + b"000000E1", # Timeout + None, + ), + b"\x05\x05\x03\x03000D777777777777777774E8\r\n": ( 
+ "ping timeout for 7777777777777777", + b"000000E1", # Timeout + None, + ), + b"\x05\x05\x03\x03000D8888888888888888B4F5\r\n": ( + "ping timeout for 8888888888888888", + b"000000E1", # Timeout + None, + ), + b"\x05\x05\x03\x0300231111111111111111D3F0\r\n": ( + "Node info for 1111111111111111", + b"000000C1", # Success ack + b"0024" # msg_id + + b"1111111111111111" # mac + + b"22026A68" # datetime + + b"000442C0" # log address 44000 + + b"01" # relay + + b"01" # hz + + b"000007014000" # hw_ver + + b"4E0844C2" # fw_ver + + b"02", # node_type (Circle) + ), + b"\x05\x05\x03\x0300232222222222222222EC09\r\n": ( + "Node info for 2222222222222222", + b"000000C1", # Success ack + b"0024" # msg_id + + b"2222222222222222" # mac + + b"22026A68" # datetime + + b"00044300" # log address + + b"01" # relay + + b"01" # hz + + b"000009001100" # hw_ver + + b"4EB28FD5" # fw_ver + + b"09", # node_type (Stealth - Legrand) + ), + b"\x05\x05\x03\x03013822222222222222220000265D\r\n": ( + "Get Node relay init state for 2222222222222222", + b"000000C1", # Success ack + b"0139" # msg_id + + b"2222222222222222" # mac + + b"00" # is_get + + b"01", # relay config + ), + b"\x05\x05\x03\x03013822222222222222220100116D\r\n": ( + "Set Node relay init state off for 2222222222222222", + b"000000C1", # Success ack + b"0139" # msg_id + + b"2222222222222222" # mac + + b"01" # is_get + + b"00", # relay config + ), + b"\x05\x05\x03\x03013822222222222222220101014C\r\n": ( + "Set Node relay init state on for 2222222222222222", + b"000000C1", # Success ack + b"0139" # msg_id + + b"2222222222222222" # mac + + b"01" # is_get + + b"01", # relay config + ), + b"\x05\x05\x03\x0300233333333333333333F95E\r\n": ( + "Node info for 3333333333333333", + b"000000C1", # Success ack + b"0024" # msg_id + + b"3333333333333333" # mac + + b"22026A68" # datetime + + b"00044340" # log address + + b"01" # relay + + b"01" # hz + + b"000007007300" # hw_ver + + b"4DCCDB7B" # fw_ver + + b"02", # node_type (Circle) + ), + b"\x05\x05\x03\x030023444444444444444493FB\r\n": ( + "Node info for 4444444444444444", + b"000000C1", # Success ack + b"0024" # msg_id + + b"4444444444444444" # mac + + b"22026A68" # datetime + + b"000443C0" # log address + + b"01" # relay + + b"01" # hz + + b"000007007300" # hw_ver + + b"4E0844C2" # fw_ver + + b"02", # node_type (Circle) + ), + b"\x05\x05\x03\x03002600987654321012344988\r\n": ( + "Calibration for 0098765432101234", + b"000000C1", # Success ack + b"0027" # msg_id + + b"0098765432101234" # mac + + b"3F80308E" # gain_a + + b"B66CF94F" # gain_b + + b"00000000" # off_tot + + b"BD14BFEC", # off_noise + ), + b"\x05\x05\x03\x0300261111111111111111809A\r\n": ( + "Calibration for 1111111111111111", + b"000000C1", # Success ack + b"0027" # msg_id + + b"1111111111111111" # mac + + b"3F7AE254" # gain_a + + b"B638FFB4" # gain_b + + b"00000000" # off_tot + + b"BC726F67", # off_noise + ), + b"\x05\x05\x03\x0300262222222222222222BF63\r\n": ( + "Calibration for 2222222222222222", + b"000000C1", # Success ack + b"0027" # msg_id + + b"2222222222222222" # mac + + b"3F806192" # gain_a + + b"B56D8019" # gain_b + + b"00000000" # off_tot + + b"BB4FA127", # off_noise + ), + b"\x05\x05\x03\x0300263333333333333333AA34\r\n": ( + "Calibration for 3333333333333333", + b"000000C1", # Success ack + b"0027" # msg_id + + b"3333333333333333" # mac + + b"3F7D8AC6" # gain_a + + b"B5F45E13" # gain_b + + b"00000000" # off_tot + + b"3CC3A53F", # off_noise + ), + b"\x05\x05\x03\x0300264444444444444444C091\r\n": ( + "Calibration for 4444444444444444", + 
b"000000C1", # Success ack + b"0027" # msg_id + + b"4444444444444444" # mac + + b"3F7D8AC6" # gain_a + + b"B5F45E13" # gain_b + + b"00000000" # off_tot + + b"3CC3A53F", # off_noise + ), + b"\x05\x05\x03\x03013844444444444444440000265D\r\n": ( + "Get Node relay init state for 4444444444444444", + b"000000C1", # Success ack + b"0139" # msg_id + + b"4444444444444444" # mac + + b"00" # is_get + + b"01", # relay config + ), + b"\x05\x05\x03\x0300290098765432101234BC36\r\n": ( + "Realtime clock for 0098765432101234", + b"000000C1", # Success ack + b"003A" # msg_id + + b"0098765432101234" # mac + + bytes(("%%0%dd" % 2) % utc_now.second, pw_constants.UTF8) + + bytes(("%%0%dd" % 2) % utc_now.minute, pw_constants.UTF8) + + bytes(("%%0%dd" % 2) % utc_now.hour, pw_constants.UTF8) + + bytes(("%%0%dd" % 2) % utc_now.weekday(), pw_constants.UTF8) + + bytes(("%%0%dd" % 2) % utc_now.day, pw_constants.UTF8) + + bytes(("%%0%dd" % 2) % utc_now.month, pw_constants.UTF8) + + bytes(("%%0%dd" % 2) % (utc_now.year - 2000), pw_constants.UTF8), + ), + b"\x05\x05\x03\x03003E11111111111111111B8A\r\n": ( + "clock for 0011111111111111", + b"000000C1", # Success ack + b"003F" # msg_id + + b"1111111111111111" # mac + + bytes(("%%0%dX" % 2) % utc_now.hour, pw_constants.UTF8) + + bytes(("%%0%dX" % 2) % utc_now.minute, pw_constants.UTF8) + + bytes(("%%0%dX" % 2) % utc_now.second, pw_constants.UTF8) + + bytes(("%%0%dX" % 2) % utc_now.weekday(), pw_constants.UTF8) + + b"00" # unknown + + b"0000", # unknown2 + ), + b"\x05\x05\x03\x03003E22222222222222222473\r\n": ( + "clock for 2222222222222222", + b"000000C1", # Success ack + b"003F" # msg_id + + b"2222222222222222" # mac + + bytes(("%%0%dX" % 2) % utc_now.hour, pw_constants.UTF8) + + bytes(("%%0%dX" % 2) % utc_now.minute, pw_constants.UTF8) + + bytes(("%%0%dX" % 2) % utc_now.second, pw_constants.UTF8) + + bytes(("%%0%dX" % 2) % utc_now.weekday(), pw_constants.UTF8) + + b"00" # unknown + + b"0000", # unknown2 + ), + b"\x05\x05\x03\x03003E33333333333333333124\r\n": ( + "clock for 3333333333333333", + b"000000C1", # Success ack + b"003F" # msg_id + + b"3333333333333333" # mac + + bytes(("%%0%dX" % 2) % utc_now.hour, pw_constants.UTF8) + + bytes(("%%0%dX" % 2) % utc_now.minute, pw_constants.UTF8) + + bytes(("%%0%dX" % 2) % utc_now.second, pw_constants.UTF8) + + bytes(("%%0%dX" % 2) % utc_now.weekday(), pw_constants.UTF8) + + b"00" # unknown + + b"0000", # unknown2 + ), + b"\x05\x05\x03\x03003E44444444444444445B81\r\n": ( + "clock for 4444444444444444", + b"000000C1", # Success ack + b"003F" # msg_id + + b"4444444444444444" # mac + + bytes(("%%0%dX" % 2) % utc_now.hour, pw_constants.UTF8) + + bytes(("%%0%dX" % 2) % utc_now.minute, pw_constants.UTF8) + + bytes(("%%0%dX" % 2) % utc_now.second, pw_constants.UTF8) + + bytes(("%%0%dX" % 2) % utc_now.weekday(), pw_constants.UTF8) + + b"00" # unknown + + b"0000", # unknown2 + ), + b"\x05\x05\x03\x03001700987654321012340104F9\r\n": ( + "Relay on for 0098765432101234", + b"000000C1", # Success ack + b"0000" # msg_id + + b"00D8" # ack id for RelaySwitchedOn + + b"0098765432101234", # mac + ), + b"\x05\x05\x03\x03001700987654321012340014D8\r\n": ( + "Relay off for 0098765432101234", + b"000000C1", # Success ack + b"0000" # msg_id + + b"00DE" # ack id for RelaySwitchedOff + + b"0098765432101234", # mac + ), + b"\x05\x05\x03\x030023555555555555555586AC\r\n": ( + "Node info for 5555555555555555", + b"000000C1", # Success ack + b"0024" # msg_id + + b"5555555555555555" # mac + + b"22026A68" # datetime + + b"00000000" # log address + + b"00" # 
relay + + b"01" # hz + + b"000008000700" # hw_ver + + b"4E084590" # fw_ver + + b"06", # node_type (Scan) + ), + b"\x05\x05\x03\x03002388888888888888886C1F\r\n": ( + "Node info for 8888888888888888", + b"000000C1", # Success ack + b"0024" # msg_id + + b"8888888888888888" # mac + + b"22026A68" # datetime + + b"00000000" # log address + + b"00" # relay + + b"01" # hz + + b"000007005100" # hw_ver + + b"4E08478A" # fw_ver + + b"03", # node_type (Switch) + ), + b"\x05\x05\x03\x03001200987654321012340A72\r\n": ( + "Power usage for 0098765432101234", + b"000000C1", # Success ack + b"0013" # msg_id + + b"0098765432101234" # mac + + b"000A" # pulses 1s + + b"FF9A" # pulses 8s + + b"00001234" + + b"00000000" + + b"0004", + ), + b"\x05\x05\x03\x0300480098765432101234000442808C54\r\n": ( + "Energy log for 0098765432101234 @ log ADDRESS 20", + b"000000C1", # Success ack + b"0049" # msg_id + + b"0098765432101234" # mac + + LOG_TIMESTAMPS[2] # datetime + + b"00000000" + + LOG_TIMESTAMPS[1] # datetime + + b"00111111" + + LOG_TIMESTAMPS[0] # datetime + + b"00111111" + + b"FFFFFFFF" # datetime + + b"00000000" + + b"00044280", # log address + ), + b"\x05\x05\x03\x030048009876543210123400044260AF5B\r\n": ( + "Energy log for 0098765432101234 @ log ADDRESS 19", + b"000000C1", # Success ack + b"0049" # msg_id + + b"0098765432101234" # mac + + LOG_TIMESTAMPS[6] # datetime + + b"00000000" + + LOG_TIMESTAMPS[5] # datetime + + b"00000000" + + LOG_TIMESTAMPS[4] # datetime + + b"00000000" + + LOG_TIMESTAMPS[3] # datetime + + b"00000000" + + b"00044260", + ), + b"\x05\x05\x03\x030048009876543210123400044240C939\r\n": ( + "Energy log for 0098765432101234 @ log ADDRESS 18", + b"000000C1", # Success ack + b"0049" # msg_id + + b"0098765432101234" # mac + + LOG_TIMESTAMPS[10] # datetime + + b"00000000" + + LOG_TIMESTAMPS[9] # datetime + + b"00000000" + + LOG_TIMESTAMPS[8] # datetime + + b"00000000" + + LOG_TIMESTAMPS[7] # datetime + + b"00000000" + + b"00044240", + ), + b"\x05\x05\x03\x030048009876543210123400044220639F\r\n": ( + "Energy log for 0098765432101234 @ log ADDRESS 17", + b"000000C1", # Success ack + b"0049" # msg_id + + b"0098765432101234" # mac + + LOG_TIMESTAMPS[14] # datetime + + b"00000000" + + LOG_TIMESTAMPS[13] # datetime + + b"00000000" + + LOG_TIMESTAMPS[12] # datetime + + b"00000000" + + LOG_TIMESTAMPS[11] # datetime + + b"00000000" + + b"00044220", + ), + b"\x05\x05\x03\x03004800987654321012340004420005FD\r\n": ( + "Energy log for 0098765432101234 @ log ADDRESS 16", + b"000000C1", # Success ack + b"0049" # msg_id + + b"0098765432101234" # mac + + LOG_TIMESTAMPS[18] # datetime + + b"00000000" + + LOG_TIMESTAMPS[17] # datetime + + b"00000000" + + LOG_TIMESTAMPS[16] # datetime + + b"00000000" + + LOG_TIMESTAMPS[15] # datetime + + b"00000000" + + b"00044200", + ), + b"\x05\x05\x03\x0300480098765432101234000441E0AB01\r\n": ( + "Energy log for 0098765432101234 @ log ADDRESS 15", + b"000000C1", # Success ack + b"0049" # msg_id + + b"0098765432101234" # mac + + LOG_TIMESTAMPS[22] # datetime + + b"00000000" + + LOG_TIMESTAMPS[21] # datetime + + b"00000000" + + LOG_TIMESTAMPS[20] # datetime + + b"00000000" + + LOG_TIMESTAMPS[19] # datetime + + b"00000000" + + b"000441E0", + ), + b"\x05\x05\x03\x0300480098765432101234000441C001A7\r\n": ( + "Energy log for 0098765432101234 @ log ADDRESS 14", + b"000000C1", # Success ack + b"0049" # msg_id + + b"0098765432101234" # mac + + LOG_TIMESTAMPS[26] # datetime + + b"00001234" + + LOG_TIMESTAMPS[25] # datetime + + b"00000080" + + LOG_TIMESTAMPS[24] # datetime + + b"00000050" 
+ + LOG_TIMESTAMPS[23] # datetime + + b"00000000" + + b"000441C0", + ), + b"\x05\x05\x03\x0300480098765432101234000441A067C5\r\n": ( + "Energy log for 0098765432101234 @ log ADDRESS 13", + b"000000C1", # Success ack + b"0049" # msg_id + + b"0098765432101234" # mac + + LOG_TIMESTAMPS[30] # datetime + + b"00000512" + + LOG_TIMESTAMPS[29] # datetime + + b"00001224" + + LOG_TIMESTAMPS[28] # datetime + + b"00000888" + + LOG_TIMESTAMPS[27] # datetime + + b"00009999" + + b"000441A0", + ), + b"\x05\x05\x03\x030048009876543210123400044180D504\r\n": ( + "Energy log for 0098765432101234 @ log ADDRESS 12", + b"000000C1", # Success ack + b"0049" # msg_id + + b"0098765432101234" # mac + + LOG_TIMESTAMPS[34] # datetime + + b"00000212" + + LOG_TIMESTAMPS[33] # datetime + + b"00001664" + + LOG_TIMESTAMPS[32] # datetime + + b"00000338" + + LOG_TIMESTAMPS[31] # datetime + + b"00001299" + + b"00044180", + ), + b"\x05\x05\x03\x030048009876543210123400044160F60B\r\n": ( + "Energy log for 0098765432101234 @ log ADDRESS 11", + b"000000C1", # Success ack + b"0049" # msg_id + + b"0098765432101234" # mac + + LOG_TIMESTAMPS[38] # datetime + + b"00001512" + + LOG_TIMESTAMPS[37] # datetime + + b"00004324" + + LOG_TIMESTAMPS[36] # datetime + + b"00000338" + + LOG_TIMESTAMPS[35] # datetime + + b"00006666" + + b"00044160", + ), + b"\x05\x05\x03\x0300480098765432101234000441409069\r\n": ( + "Energy log for 0098765432101234 @ log ADDRESS 10", + b"000000C1", # Success ack + b"0049" # msg_id + + b"0098765432101234" # mac + + LOG_TIMESTAMPS[42] # datetime + + b"00001542" + + LOG_TIMESTAMPS[41] # datetime + + b"00004366" + + LOG_TIMESTAMPS[40] # datetime + + b"00000638" + + LOG_TIMESTAMPS[39] # datetime + + b"00005231" + + b"00044140", + ), + b"\x05\x05\x03\x0300480098765432101234000441203ACF\r\n": ( + "Energy log for 0098765432101234 @ log ADDRESS 9", + b"000000C1", # Success ack + b"0049" # msg_id + + b"0098765432101234" # mac + + LOG_TIMESTAMPS[46] # datetime + + b"00001542" + + LOG_TIMESTAMPS[45] # datetime + + b"00004366" + + LOG_TIMESTAMPS[44] # datetime + + b"00000638" + + LOG_TIMESTAMPS[43] # datetime + + b"00005231" + + b"00044120", + ), + b"\x05\x05\x03\x0300480098765432101234000440E09C31\r\n": ( + "Energy log for 0098765432101234 @ log ADDRESS 8", + b"000000C1", # Success ack + b"0049" # msg_id + + b"0098765432101234" # mac + + LOG_TIMESTAMPS[50] # datetime + + b"00001542" + + LOG_TIMESTAMPS[49] # datetime + + b"00004366" + + LOG_TIMESTAMPS[48] # datetime + + b"00000638" + + LOG_TIMESTAMPS[47] # datetime + + b"00005231" + + b"000440E0", + ), + b"\x05\x05\x03\x0300480098765432101234000440C03697\r\n": ( + "Energy log for 0098765432101234 @ log ADDRESS 7", + b"000000C1", # Success ack + b"0049" # msg_id + + b"0098765432101234" # mac + + LOG_TIMESTAMPS[54] # datetime + + b"00001542" + + LOG_TIMESTAMPS[53] # datetime + + b"00004366" + + LOG_TIMESTAMPS[52] # datetime + + b"00000638" + + LOG_TIMESTAMPS[51] # datetime + + b"00005231" + + b"000440C0", + ), + b"\x05\x05\x03\x0300480098765432101234000440A050F5\r\n": ( + "Energy log for 0098765432101234 @ log ADDRESS 6", + b"000000C1", # Success ack + b"0049" # msg_id + + b"0098765432101234" # mac + + LOG_TIMESTAMPS[58] # datetime + + b"00001542" + + LOG_TIMESTAMPS[57] # datetime + + b"00004366" + + LOG_TIMESTAMPS[56] # datetime + + b"00000638" + + LOG_TIMESTAMPS[55] # datetime + + b"00005231" + + b"000440A0", + ), + b"\x05\x05\x03\x030048009876543210123400044080E234\r\n": ( + "Energy log for 0098765432101234 @ log ADDRESS 5", + b"000000C1", # Success ack + b"0049" # msg_id + + 
b"0098765432101234" # mac + + LOG_TIMESTAMPS[62] # datetime + + b"00001542" + + LOG_TIMESTAMPS[61] # datetime + + b"00004366" + + LOG_TIMESTAMPS[60] # datetime + + b"00000638" + + LOG_TIMESTAMPS[59] # datetime + + b"00005231" + + b"00044080", + ), + b"\x05\x05\x03\x030048009876543210123400044060C13B\r\n": ( + "Energy log for 0098765432101234 @ log ADDRESS 4", + b"000000C1", # Success ack + b"0049" # msg_id + + b"0098765432101234" # mac + + LOG_TIMESTAMPS[66] # datetime + + b"00001542" + + LOG_TIMESTAMPS[65] # datetime + + b"00004366" + + LOG_TIMESTAMPS[64] # datetime + + b"00000638" + + LOG_TIMESTAMPS[63] # datetime + + b"00005231" + + b"00044060", + ), + b"\x05\x05\x03\x030048009876543210123400044040A759\r\n": ( + "Energy log for 0098765432101234 @ log ADDRESS 3", + b"000000C1", # Success ack + b"0049" # msg_id + + b"0098765432101234" # mac + + LOG_TIMESTAMPS[70] # datetime + + b"00001542" + + LOG_TIMESTAMPS[69] # datetime + + b"00004366" + + LOG_TIMESTAMPS[68] # datetime + + b"00000638" + + LOG_TIMESTAMPS[67] # datetime + + b"00005231" + + b"00044040", + ), + b"\x05\x05\x03\x0300480098765432101234000440200DFF\r\n": ( + "Energy log for 0098765432101234 @ log ADDRESS 2", + b"000000C1", # Success ack + b"0049" # msg_id + + b"0098765432101234" # mac + + LOG_TIMESTAMPS[74] # datetime + + b"00001542" + + LOG_TIMESTAMPS[73] # datetime + + b"00004366" + + LOG_TIMESTAMPS[72] # datetime + + b"00000638" + + LOG_TIMESTAMPS[71] # datetime + + b"00005231" + + b"00044020", + ), + b"\x05\x05\x03\x0300480098765432101234000441005CAD\r\n": ( + "Energy log for 0098765432101234 @ log ADDRESS 1", + b"000000C1", # Success ack + b"0049" # msg_id + + b"0098765432101234" # mac + + LOG_TIMESTAMPS[78] # datetime + + b"00001542" + + LOG_TIMESTAMPS[77] # datetime + + b"00004366" + + LOG_TIMESTAMPS[76] # datetime + + b"00000638" + + LOG_TIMESTAMPS[75] # datetime + + b"00005231" + + b"00044100", + ), + b"\x05\x05\x03\x0300121111111111111111C360\r\n": ( + "Power usage for 1111111111111111", + b"000000C1", # Success ack + b"0013" # msg_id + + b"1111111111111111" # mac + + b"005A" # pulses 1s + + b"0098" # pulses 8s + + b"00008787" + + b"00008123" + + b"0004", + ), + b"\x05\x05\x03\x0300481111111111111111000442C05D37\r\n": ( + "Energy log for 1111111111111111 @ log ADDRESS 20", + b"000000C1", # Success ack + b"0049" # msg_id + + b"1111111111111111" # mac + + LOG_TIMESTAMPS[0] # datetime + + b"00222222" + + LOG_TIMESTAMPS[0] # datetime + + b"00111111" + + b"FFFFFFFF" # datetime + + b"00000000" + + b"FFFFFFFF" # datetime + + b"00000000" + + b"000442C0", # log address + ), + b"\x05\x05\x03\x0300481111111111111111000442A03B55\r\n": ( + "Energy log for 1111111111111111 @ log ADDRESS 19", + b"000000C1", # Success ack + b"0049" # msg_id + + b"1111111111111111" # mac + + LOG_TIMESTAMPS[2] # datetime + + b"00002000" + + LOG_TIMESTAMPS[2] # datetime + + b"00001000" + + LOG_TIMESTAMPS[1] # datetime + + b"00000500" + + LOG_TIMESTAMPS[1] # datetime + + b"00000250" + + b"000442A0", + ), + b"\x05\x05\x03\x0300481111111111111111000442808994\r\n": ( + "Energy log for 1111111111111111 @ log ADDRESS 18", + b"000000C1", # Success ack + b"0049" # msg_id + + b"1111111111111111" # mac + + LOG_TIMESTAMPS[4] # datetime + + b"00000000" + + LOG_TIMESTAMPS[4] # datetime + + b"00000000" + + LOG_TIMESTAMPS[3] # datetime + + b"00008000" + + LOG_TIMESTAMPS[3] # datetime + + b"00004000" + + b"00044280", + ), + b"\x05\x05\x03\x030048111111111111111100044260AA9B\r\n": ( + "Energy log for 1111111111111111 @ log ADDRESS 17", + b"000000C1", # Success ack + 
b"0049" # msg_id + + b"1111111111111111" # mac + + LOG_TIMESTAMPS[6] # datetime + + b"00000800" + + LOG_TIMESTAMPS[6] # datetime + + b"00000400" + + LOG_TIMESTAMPS[5] # datetime + + b"00040000" + + LOG_TIMESTAMPS[5] # datetime + + b"00020000" + + b"00044260", + ), + b"\x05\x05\x03\x030048111111111111111100044240CCF9\r\n": ( + "Energy log for 1111111111111111 @ log ADDRESS 16", + b"000000C1", # Success ack + b"0049" # msg_id + + b"1111111111111111" # mac + + LOG_TIMESTAMPS[8] # datetime + + b"00000000" + + LOG_TIMESTAMPS[8] # datetime + + b"00000000" + + LOG_TIMESTAMPS[7] # datetime + + b"00000000" + + LOG_TIMESTAMPS[7] # datetime + + b"00000000" + + b"00044240", + ), + b"\x05\x05\x03\x030048111111111111111100044220665F\r\n": ( + "Energy log for 1111111111111111 @ log ADDRESS 14", + b"000000C1", # Success ack + b"0049" # msg_id + + b"1111111111111111" # mac + + LOG_TIMESTAMPS[10] # datetime + + b"00004444" + + LOG_TIMESTAMPS[10] # datetime + + b"00002222" + + LOG_TIMESTAMPS[9] # datetime + + b"00011111" + + LOG_TIMESTAMPS[9] # datetime + + b"00022222" + + b"00044220", + ), + b"\x05\x05\x03\x030048111111111111111100044200003D\r\n": ( + "Energy log for 1111111111111111 @ log ADDRESS 13", + b"000000C1", # Success ack + b"0049" # msg_id + + b"1111111111111111" # mac + + LOG_TIMESTAMPS[12] # datetime + + b"00000660" + + LOG_TIMESTAMPS[12] # datetime + + b"00000330" + + LOG_TIMESTAMPS[11] # datetime + + b"00006400" + + LOG_TIMESTAMPS[11] # datetime + + b"00003200" + + b"00044200", # log address + ), + b"\x05\x05\x03\x0300481111111111111111000441E0AEC1\r\n": ( + "Energy log for 1111111111111111 @ log ADDRESS 12", + b"000000C1", # Success ack + b"0049" # msg_id + + b"1111111111111111" # mac + + LOG_TIMESTAMPS[12] # datetime + + b"00000660" + + LOG_TIMESTAMPS[12] # datetime + + b"00000330" + + LOG_TIMESTAMPS[11] # datetime + + b"00006400" + + LOG_TIMESTAMPS[11] # datetime + + b"00003200" + + b"000441E0", # log address + ), + b"\x05\x05\x03\x0300481111111111111111000441C00467\r\n": ( + "Energy log for 1111111111111111 @ log ADDRESS 11", + b"000000C1", # Success ack + b"0049" # msg_id + + b"1111111111111111" # mac + + LOG_TIMESTAMPS[14] # datetime + + b"00000512" + + LOG_TIMESTAMPS[14] # datetime + + b"00000254" + + LOG_TIMESTAMPS[13] # datetime + + b"00000888" + + LOG_TIMESTAMPS[13] # datetime + + b"00000444" + + b"000441C0", + ), + b"\x05\x05\x03\x0300481111111111111111000441A06205\r\n": ( + "Energy log for 1111111111111111 @ log ADDRESS 10", + b"000000C1", # Success ack + b"0049" # msg_id + + b"1111111111111111" # mac + + LOG_TIMESTAMPS[16] # datetime + + b"00000512" + + LOG_TIMESTAMPS[16] # datetime + + b"00001224" + + LOG_TIMESTAMPS[15] # datetime + + b"00000888" + + LOG_TIMESTAMPS[15] # datetime + + b"00009999" + + b"000441A0", + ), + b"\x05\x05\x03\x030048111111111111111100044180D0C4\r\n": ( + "Energy log for 1111111111111111 @ log ADDRESS 9", + b"000000C1", # Success ack + b"0049" # msg_id + + b"1111111111111111" # mac + + LOG_TIMESTAMPS[18] # datetime + + b"00000512" + + LOG_TIMESTAMPS[18] # datetime + + b"00001224" + + LOG_TIMESTAMPS[17] # datetime + + b"00000888" + + LOG_TIMESTAMPS[17] # datetime + + b"00000444" + + b"00044180", + ), + b"\x05\x05\x03\x030048111111111111111100044160F3CB\r\n": ( + "Energy log for 1111111111111111 @ log ADDRESS 8", + b"000000C1", # Success ack + b"0049" # msg_id + + b"1111111111111111" # mac + + LOG_TIMESTAMPS[20] # datetime + + b"00006666" + + LOG_TIMESTAMPS[20] # datetime + + b"00003333" + + LOG_TIMESTAMPS[19] # datetime + + b"00004848" + + 
LOG_TIMESTAMPS[19] # datetime + + b"00002424" + + b"00044160", + ), + b"\x05\x05\x03\x03004811111111111111110004414095A9\r\n": ( + "Energy log for 1111111111111111 @ log ADDRESS 7", + b"000000C1", # Success ack + b"0049" # msg_id + + b"1111111111111111" # mac + + LOG_TIMESTAMPS[22] # datetime + + b"00000512" + + LOG_TIMESTAMPS[22] # datetime + + b"00001224" + + LOG_TIMESTAMPS[21] # datetime + + b"00000888" + + LOG_TIMESTAMPS[21] # datetime + + b"00009999" + + b"00044140", + ), + b"\x05\x05\x03\x0300481111111111111111000441203F0F\r\n": ( + "Energy log for 1111111111111111 @ log ADDRESS 6", + b"000000C1", # Success ack + b"0049" # msg_id + + b"1111111111111111" # mac + + LOG_TIMESTAMPS[25] # datetime + + b"00001024" + + LOG_TIMESTAMPS[25] # datetime + + b"00000512" + + LOG_TIMESTAMPS[24] # datetime + + b"00004646" + + LOG_TIMESTAMPS[24] # datetime + + b"00002323" + + b"00044120", + ), + b"\x05\x05\x03\x030048111111111111111100044100596D\r\n": ( + "Energy log for 1111111111111111 @ log ADDRESS 5", + b"000000C1", # Success ack + b"0049" # msg_id + + b"1111111111111111" # mac + + LOG_TIMESTAMPS[27] # datetime + + b"00001024" + + LOG_TIMESTAMPS[27] # datetime + + b"00000512" + + LOG_TIMESTAMPS[26] # datetime + + b"00004646" + + LOG_TIMESTAMPS[26] # datetime + + b"00002323" + + b"00044100", + ), + b"\x05\x05\x03\x0300481111111111111111000440E099F1\r\n": ( + "Energy log for 1111111111111111 @ log ADDRESS 4", + b"000000C1", # Success ack + b"0049" # msg_id + + b"1111111111111111" # mac + + LOG_TIMESTAMPS[29] # datetime + + b"00001024" + + LOG_TIMESTAMPS[29] # datetime + + b"00000512" + + LOG_TIMESTAMPS[28] # datetime + + b"00004646" + + LOG_TIMESTAMPS[28] # datetime + + b"00002323" + + b"000440E0", + ), + b"\x05\x05\x03\x0300481111111111111111000440C03357\r\n": ( + "Energy log for 1111111111111111 @ log ADDRESS 3", + b"000000C1", # Success ack + b"0049" # msg_id + + b"1111111111111111" # mac + + LOG_TIMESTAMPS[31] # datetime + + b"00001024" + + LOG_TIMESTAMPS[31] # datetime + + b"00000512" + + LOG_TIMESTAMPS[30] # datetime + + b"00004646" + + LOG_TIMESTAMPS[30] # datetime + + b"00002323" + + b"000440C0", + ), + b"\x05\x05\x03\x0300481111111111111111000440A05535\r\n": ( + "Energy log for 1111111111111111 @ log ADDRESS 2", + b"000000C1", # Success ack + b"0049" # msg_id + + b"1111111111111111" # mac + + LOG_TIMESTAMPS[33] # datetime + + b"00001024" + + LOG_TIMESTAMPS[33] # datetime + + b"00000512" + + LOG_TIMESTAMPS[32] # datetime + + b"00004646" + + LOG_TIMESTAMPS[32] # datetime + + b"00002323" + + b"000440A0", + ), + b"\x05\x05\x03\x030048111111111111111100044080E7F4\r\n": ( + "Energy log for 1111111111111111 @ log ADDRESS 1", + b"000000C1", # Success ack + b"0049" # msg_id + + b"1111111111111111" # mac + + LOG_TIMESTAMPS[35] # datetime + + b"00001024" + + LOG_TIMESTAMPS[35] # datetime + + b"00000512" + + LOG_TIMESTAMPS[34] # datetime + + b"00004646" + + LOG_TIMESTAMPS[34] # datetime + + b"00002323" + + b"00044080", + ), + b"\x05\x05\x03\x030048111111111111111100044060C4FB\r\n": ( + "Energy log for 1111111111111111 @ log ADDRESS 1", + b"000000C1", # Success ack + b"0049" # msg_id + + b"1111111111111111" # mac + + LOG_TIMESTAMPS[38] # datetime + + b"00001024" + + LOG_TIMESTAMPS[38] # datetime + + b"00000512" + + LOG_TIMESTAMPS[36] # datetime + + b"00004646" + + LOG_TIMESTAMPS[36] # datetime + + b"00002323" + + b"00044060", + ), + b"\x05\x05\x03\x030048111111111111111100044040A299\r\n": ( + "Energy log for 1111111111111111 @ log ADDRESS 1", + b"000000C1", # Success ack + b"0049" # msg_id + + 
b"1111111111111111" # mac + + LOG_TIMESTAMPS[41] # datetime + + b"00001024" + + LOG_TIMESTAMPS[41] # datetime + + b"00000512" + + LOG_TIMESTAMPS[40] # datetime + + b"00004646" + + LOG_TIMESTAMPS[40] # datetime + + b"00002323" + + b"00044040", + ), + b"\x05\x05\x03\x030048111111111111111100044020083F\r\n": ( + "Energy log for 1111111111111111 @ log ADDRESS 6", + b"000000C1", # Success ack + b"0049" # msg_id + + b"1111111111111111" # mac + + LOG_TIMESTAMPS[43] # datetime + + b"00000512" + + LOG_TIMESTAMPS[43] # datetime + + b"00000254" + + LOG_TIMESTAMPS[42] # datetime + + b"00000888" + + LOG_TIMESTAMPS[42] # datetime + + b"00000444" + + b"00044020", + ), +} + + +PARTLY_RESPONSE_MESSAGES = { + b"\x05\x05\x03\x0300161111111111111111": ( + "Clock set 1111111111111111", + b"000000C1", # Success ack + b"0000" + b"00D7" + b"1111111111111111", # msg_id, ClockAccepted, mac + ), + b"\x05\x05\x03\x0300162222222222222222": ( + "Clock set 2222222222222222", + b"000000C1", # Success ack + b"0000" + b"00D7" + b"2222222222222222", # msg_id, ClockAccepted, mac + ), + b"\x05\x05\x03\x0300163333333333333333": ( + "Clock set 3333333333333333", + b"000000C1", # Success ack + b"0000" + b"00D7" + b"3333333333333333", # msg_id, ClockAccepted, mac + ), + b"\x05\x05\x03\x0300164444444444444444": ( + "Clock set 4444444444444444", + b"000000C1", # Success ack + b"0000" + b"00D7" + b"4444444444444444", # msg_id, ClockAccepted, mac + ), +} + +SECOND_RESPONSE_MESSAGES = { + b"\x05\x05\x03\x03000D55555555555555555E46\r\n": ( + "ping reply for 5555555555555555", + b"000000C1", # Success ack + b"000E" + + b"5555555555555555" # mac + + b"44" # rssi in + + b"33" # rssi out + + b"0055", # roundtrip + ) +} diff --git a/tests/test_usb.py b/tests/test_usb.py index bcffb953b..e39d5823e 100644 --- a/tests/test_usb.py +++ b/tests/test_usb.py @@ -1,17 +1,2636 @@ -# pylint: disable=protected-access -"""Test Plugwise Stick features.""" +"""Test plugwise USB Stick.""" +import asyncio +from collections.abc import Callable, Coroutine +from datetime import UTC, datetime as dt, timedelta as td import importlib +import logging +import random +from typing import Any +from unittest.mock import MagicMock, Mock, patch + +import pytest + +import aiofiles # type: ignore[import-untyped] +import crcmod +from freezegun import freeze_time + +crc_fun = crcmod.mkCrcFun(0x11021, rev=False, initCrc=0x0000, xorOut=0x0000) -pw_constants = importlib.import_module("plugwise_usb.constants") -pw_exceptions = importlib.import_module("plugwise_usb.exceptions") pw_stick = importlib.import_module("plugwise_usb") +pw_api = importlib.import_module("plugwise_usb.api") +pw_exceptions = importlib.import_module("plugwise_usb.exceptions") +pw_connection = importlib.import_module("plugwise_usb.connection") +pw_connection_manager = importlib.import_module("plugwise_usb.connection.manager") +pw_constants = importlib.import_module("plugwise_usb.constants") +pw_helpers_cache = importlib.import_module("plugwise_usb.helpers.cache") +pw_network_cache = importlib.import_module("plugwise_usb.network.cache") +pw_node_cache = importlib.import_module("plugwise_usb.nodes.helpers.cache") +pw_receiver = importlib.import_module("plugwise_usb.connection.receiver") +pw_sender = importlib.import_module("plugwise_usb.connection.sender") +pw_requests = importlib.import_module("plugwise_usb.messages.requests") +pw_responses = importlib.import_module("plugwise_usb.messages.responses") +pw_msg_properties = importlib.import_module("plugwise_usb.messages.properties") +pw_userdata = 
importlib.import_module("stick_test_data") +pw_node = importlib.import_module("plugwise_usb.nodes.node") +pw_circle = importlib.import_module("plugwise_usb.nodes.circle") +pw_sed = importlib.import_module("plugwise_usb.nodes.sed") +pw_scan = importlib.import_module("plugwise_usb.nodes.scan") +pw_switch = importlib.import_module("plugwise_usb.nodes.switch") +pw_energy_counter = importlib.import_module("plugwise_usb.nodes.helpers.counter") +pw_energy_calibration = importlib.import_module("plugwise_usb.nodes.helpers") +pw_energy_pulses = importlib.import_module("plugwise_usb.nodes.helpers.pulses") + +_LOGGER = logging.getLogger(__name__) +_LOGGER.setLevel(logging.DEBUG) + + +def inc_seq_id(seq_id: bytes | None) -> bytes: + """Increment sequence id.""" + if seq_id is None: + return b"0000" + temp_int = int(seq_id, 16) + 1 + if temp_int >= 65532: + temp_int = 0 + temp_str = str(hex(temp_int)).lstrip("0x").upper() + while len(temp_str) < 4: + temp_str = "0" + temp_str + return temp_str.encode() + + +def construct_message(data: bytes, seq_id: bytes = b"0000") -> bytes: + """Construct plugwise message.""" + body = data[:4] + seq_id + data[4:] + return bytes( + pw_constants.MESSAGE_HEADER + + body + + bytes(f"{crc_fun(body):04X}", pw_constants.UTF8) + + pw_constants.MESSAGE_FOOTER + ) + + +class DummyTransport: + """Dummy transport class.""" + + protocol_data_received: Callable[[bytes], None] + + def __init__( + self, + loop: asyncio.AbstractEventLoop, + test_data: dict[bytes, tuple[str, bytes, bytes | None]] | None = None, + ) -> None: + """Initialize dummy transport class.""" + self._loop = loop + self._msg = 0 + self._seq_id = b"1233" + self._processed: list[bytes] = [] + self._first_response = test_data + self._second_response = test_data + if test_data is None: + self._first_response = pw_userdata.RESPONSE_MESSAGES + self._second_response = pw_userdata.SECOND_RESPONSE_MESSAGES + self.random_extra_byte = 0 + self._closing = False + + def is_closing(self) -> bool: + """Close connection.""" + return self._closing + + def write(self, data: bytes) -> None: + """Write data back to system.""" + log = None + ack = None + response = None + if data in self._processed and self._second_response is not None: + log, ack, response = self._second_response.get(data, (None, None, None)) + if log is None and self._first_response is not None: + log, ack, response = self._first_response.get(data, (None, None, None)) + if log is None: + resp = pw_userdata.PARTLY_RESPONSE_MESSAGES.get( + data[:24], (None, None, None) + ) + if resp is None: + _LOGGER.debug("No msg response for %s", str(data)) + return + log, ack, response = resp + if ack is None: + _LOGGER.debug("No ack response for %s", str(data)) + return + + self._seq_id = inc_seq_id(self._seq_id) + if response and self._msg == 0: + self.message_response_at_once(ack, response, self._seq_id) + self._processed.append(data) + else: + self.message_response(ack, self._seq_id) + self._processed.append(data) + if response is None or self._closing: + return + self._loop.create_task(self._delayed_response(response, self._seq_id)) + self._msg += 1 + + async def _delayed_response(self, data: bytes, seq_id: bytes) -> None: + delay = random.uniform(0.005, 0.025) + await asyncio.sleep(delay) + self.message_response(data, seq_id) + + def message_response(self, data: bytes, seq_id: bytes) -> None: + """Handle message response.""" + self.random_extra_byte += 1 + if self.random_extra_byte > 25: + self.protocol_data_received(b"\x83") + self.random_extra_byte = 0 + 
self.protocol_data_received(construct_message(data, seq_id) + b"\x83") + else: + self.protocol_data_received(construct_message(data, seq_id)) + + def message_response_at_once(self, ack: bytes, data: bytes, seq_id: bytes) -> None: + """Full message.""" + self.random_extra_byte += 1 + if self.random_extra_byte > 25: + self.protocol_data_received(b"\x83") + self.random_extra_byte = 0 + self.protocol_data_received( + construct_message(ack, seq_id) + + construct_message(data, seq_id) + + b"\x83" + ) + else: + self.protocol_data_received( + construct_message(ack, seq_id) + construct_message(data, seq_id) + ) + + def close(self) -> None: + """Close connection.""" + self._closing = True + + +class MockSerial: + """Mock serial connection.""" + + def __init__( + self, custom_response: dict[bytes, tuple[str, bytes, bytes | None]] | None + ) -> None: + """Init mocked serial connection.""" + self.custom_response = custom_response + self._protocol: pw_receiver.StickReceiver | None = None # type: ignore[name-defined] + self._transport: DummyTransport | None = None + + def inject_message(self, data: bytes, seq_id: bytes) -> None: + """Inject message to be received from stick.""" + if self._transport is None: + return + self._transport.message_response(data, seq_id) + + def trigger_connection_lost(self) -> None: + """Trigger connection lost.""" + if self._protocol is None: + return + self._protocol.connection_lost() + + async def mock_connection( + self, + loop: asyncio.AbstractEventLoop, + protocol_factory: Callable[[], pw_receiver.StickReceiver], # type: ignore[name-defined] + **kwargs: dict[str, Any], + ) -> tuple[DummyTransport, pw_receiver.StickReceiver]: # type: ignore[name-defined] + """Mock connection with dummy connection.""" + self._protocol = protocol_factory() + self._transport = DummyTransport(loop, self.custom_response) + self._transport.protocol_data_received = self._protocol.data_received + loop.call_soon_threadsafe(self._protocol.connection_made, self._transport) + return self._transport, self._protocol + + +class MockOsPath: + """Mock aiofiles.path class.""" + + async def exists(self, file_or_path: str) -> bool: + """Exists folder.""" + if file_or_path == "mock_folder_that_exists": + return True + if file_or_path == "mock_folder_that_exists/nodes.cache": + return True + if file_or_path == "mock_folder_that_exists\\nodes.cache": + return True + if file_or_path == "mock_folder_that_exists/0123456789ABCDEF.cache": + return True + if file_or_path == "mock_folder_that_exists\\0123456789ABCDEF.cache": + return True + if file_or_path == "mock_folder_that_exists\\file_that_exists.ext": + return True + return file_or_path == "mock_folder_that_exists/file_that_exists.ext" + + async def mkdir(self, path: str) -> None: + """Make dir.""" + return + + +class MockStickController: + """Mock stick controller.""" + + send_response = None + + async def subscribe_to_messages( + self, + node_response_callback: Callable[ # type: ignore[name-defined] + [pw_responses.PlugwiseResponse], Coroutine[Any, Any, bool] + ], + mac: bytes | None = None, + message_ids: tuple[bytes] | None = None, + ) -> Callable[[], None]: + """Subscribe a awaitable callback to be called when a specific message is received. + + Returns function to unsubscribe. 
+ """ + + def dummy_method() -> None: + """Fake method.""" + + return dummy_method + + async def send( + self, + request: pw_requests.PlugwiseRequest, # type: ignore[name-defined] + suppress_node_errors: bool = True, + ) -> pw_responses.PlugwiseResponse | None: # type: ignore[name-defined] + """Submit request to queue and return response.""" + return self.send_response + + +aiofiles.threadpool.wrap.register(MagicMock)( + lambda *args, **kwargs: aiofiles.threadpool.AsyncBufferedIOBase(*args, **kwargs) # pylint: disable=unnecessary-lambda +) + + +class TestStick: + """Test USB Stick.""" + + test_node_awake: asyncio.Future[str] + test_node_loaded: asyncio.Future[str] + test_node_join: asyncio.Future[str] + test_connected: asyncio.Future[bool] + test_disconnected: asyncio.Future[bool] + test_relay_state_on: asyncio.Future[bool] + test_relay_state_off: asyncio.Future[bool] + test_motion_on: asyncio.Future[bool] + test_motion_off: asyncio.Future[bool] + test_init_relay_state_off: asyncio.Future[bool] + test_init_relay_state_on: asyncio.Future[bool] + + async def dummy_fn(self, request: pw_requests.PlugwiseRequest, test: bool) -> None: # type: ignore[name-defined] + """Callable dummy routine.""" + return + + @pytest.mark.asyncio + async def test_sorting_request_messages(self) -> None: + """Test request message priority sorting.""" + + node_add_request = pw_requests.NodeAddRequest( + self.dummy_fn, b"1111222233334444", True + ) + await asyncio.sleep(0.001) # Ensure timestamp is different + relay_switch_request = pw_requests.CircleRelaySwitchRequest( + self.dummy_fn, b"1234ABCD12341234", True + ) + await asyncio.sleep(0.001) # Ensure timestamp is different + circle_plus_allow_joining_request = pw_requests.CirclePlusAllowJoiningRequest( + self.dummy_fn, True + ) + + # validate sorting based on timestamp with same priority level + assert node_add_request < circle_plus_allow_joining_request + assert circle_plus_allow_joining_request > node_add_request + assert circle_plus_allow_joining_request >= node_add_request + assert node_add_request <= circle_plus_allow_joining_request + + # validate sorting based on priority + assert relay_switch_request > node_add_request + assert relay_switch_request >= node_add_request + assert node_add_request < relay_switch_request + assert node_add_request <= relay_switch_request + assert relay_switch_request > circle_plus_allow_joining_request + assert relay_switch_request >= circle_plus_allow_joining_request + assert circle_plus_allow_joining_request < relay_switch_request + assert circle_plus_allow_joining_request <= relay_switch_request + + # Change priority + node_add_request.priority = pw_requests.Priority.LOW + # Validate node_add_request is less than other requests + assert node_add_request < relay_switch_request + assert node_add_request <= relay_switch_request + assert node_add_request < circle_plus_allow_joining_request + assert node_add_request <= circle_plus_allow_joining_request + assert relay_switch_request > node_add_request + assert relay_switch_request >= node_add_request + assert circle_plus_allow_joining_request > node_add_request + assert circle_plus_allow_joining_request >= node_add_request + + @pytest.mark.asyncio + async def test_msg_properties(self) -> None: + """Test message properties.""" + + # UnixTimestamp + unix_timestamp = pw_msg_properties.UnixTimestamp( + dt(2011, 6, 27, 9, 4, 10, tzinfo=UTC), 8 + ) + assert unix_timestamp.serialize() == b"4E08478A" + with pytest.raises(pw_exceptions.MessageError): + assert unix_timestamp.value == dt(2011, 
6, 27, 9, 4, 10, tzinfo=UTC) + unix_timestamp.deserialize(b"4E08478A") + assert unix_timestamp.value == dt(2011, 6, 27, 9, 4, 10, tzinfo=UTC) + + @pytest.mark.asyncio + async def test_stick_connect_without_port(self) -> None: + """Test connecting to stick without port config.""" + stick = pw_stick.Stick() + assert stick.accept_join_request is None + assert stick.nodes == {} + assert stick.joined_nodes is None + with pytest.raises(pw_exceptions.StickError): + assert stick.mac_stick + with pytest.raises(pw_exceptions.StickError): + assert stick.mac_coordinator + with pytest.raises(pw_exceptions.StickError): + assert stick.network_id + assert not stick.network_discovered + assert not stick.network_state + + with pytest.raises(pw_exceptions.StickError): + await stick.connect() + stick.port = "null" + with pytest.raises(pw_exceptions.StickError): + await stick.connect() + await stick.disconnect() + + @pytest.mark.asyncio + async def test_stick_reconnect(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Test connecting to stick while already connected.""" + monkeypatch.setattr( + pw_connection_manager, + "create_serial_connection", + MockSerial(None).mock_connection, + ) + stick = pw_stick.Stick() + stick.port = "test_port" + assert stick.port == "test_port" + await stick.connect() + # second time should raise + with pytest.raises(pw_exceptions.StickError): + await stick.connect() + await stick.disconnect() + + @pytest.mark.asyncio + async def test_stick_connect_without_response( + self, monkeypatch: pytest.MonkeyPatch + ) -> None: + """Test connecting to stick without response.""" + monkeypatch.setattr( + pw_connection_manager, + "create_serial_connection", + MockSerial( + { + b"FFFF": ( + "no response", + b"0000", + b"", + ), + } + ).mock_connection, + ) + monkeypatch.setattr(pw_sender, "STICK_TIME_OUT", 0.2) + stick = pw_stick.Stick() + stick.port = "test_port" + with pytest.raises(pw_exceptions.StickError): + await stick.initialize() + # Connect + await stick.connect() + # Still raise StickError connected but without response + with pytest.raises(pw_exceptions.StickError): + await stick.initialize() + await stick.disconnect() + + @pytest.mark.asyncio + async def test_stick_connect_timeout(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Test connecting to stick.""" + monkeypatch.setattr( + pw_connection_manager, + "create_serial_connection", + MockSerial( + { + b"\x05\x05\x03\x03000AB43C\r\n": ( + "STICK INIT timeout", + b"000000E1", # Timeout ack + None, + ), + } + ).mock_connection, + ) + monkeypatch.setattr(pw_sender, "STICK_TIME_OUT", 0.5) + stick = pw_stick.Stick() + await stick.connect("test_port") + with pytest.raises(pw_exceptions.StickError): + await stick.initialize() + await stick.disconnect() + + async def connected(self, event: pw_api.StickEvent) -> None: # type: ignore[name-defined] + """Set connected state helper.""" + if event is pw_api.StickEvent.CONNECTED: + self.test_connected.set_result(True) + else: + self.test_connected.set_exception(BaseException("Incorrect event")) + + @pytest.mark.asyncio + async def test_stick_connect(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Test connecting to stick.""" + monkeypatch.setattr( + pw_connection_manager, + "create_serial_connection", + MockSerial(None).mock_connection, + ) + stick = pw_stick.Stick(port="test_port", cache_enabled=False) + + unsub_connect = stick.subscribe_to_stick_events( + stick_event_callback=self.connected, + events=(pw_api.StickEvent.CONNECTED,), + ) + self.test_connected = asyncio.Future() + await 
stick.connect("test_port") + assert await self.test_connected + await stick.initialize() + assert stick.mac_stick == "0123456789012345" + assert stick.name == "Stick 12345" + assert stick.mac_coordinator == "0098765432101234" + assert stick.firmware == dt(2011, 6, 27, 8, 47, 37, tzinfo=UTC) + assert stick.hardware == "070085" + assert not stick.network_discovered + assert stick.network_state + assert stick.network_id == 17185 + assert stick.accept_join_request is None + # test failing of join requests without active discovery + with pytest.raises(pw_exceptions.StickError): + stick.accept_join_request = True + unsub_connect() + await stick.disconnect() + assert not stick.network_state + with pytest.raises(pw_exceptions.StickError): + assert stick.mac_stick + + async def disconnected(self, event: pw_api.StickEvent) -> None: # type: ignore[name-defined] + """Handle disconnect event callback.""" + if event is pw_api.StickEvent.DISCONNECTED: + self.test_disconnected.set_result(True) + else: + self.test_disconnected.set_exception(BaseException("Incorrect event")) + + @pytest.mark.asyncio + async def test_stick_connection_lost(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Test connecting to stick.""" + mock_serial = MockSerial(None) + monkeypatch.setattr( + pw_connection_manager, + "create_serial_connection", + mock_serial.mock_connection, + ) + stick = pw_stick.Stick() + await stick.connect("test_port") + await stick.initialize() + assert stick.network_state + self.test_disconnected = asyncio.Future() + unsub_disconnect = stick.subscribe_to_stick_events( + stick_event_callback=self.disconnected, + events=(pw_api.StickEvent.DISCONNECTED,), + ) + # Trigger disconnect + mock_serial.trigger_connection_lost() + assert await self.test_disconnected + assert not stick.network_state + unsub_disconnect() + await stick.disconnect() + + async def node_awake(self, event: pw_api.NodeEvent, mac: str) -> None: # type: ignore[name-defined] + """Handle awake event callback.""" + if event == pw_api.NodeEvent.AWAKE: + self.test_node_awake.set_result(mac) + else: + self.test_node_awake.set_exception( + BaseException( + f"Invalid {event} event, expected " + f"{pw_api.NodeEvent.AWAKE}" + ) + ) + + async def node_loaded(self, event: pw_api.NodeEvent, mac: str) -> None: # type: ignore[name-defined] + """Handle awake event callback.""" + if event == pw_api.NodeEvent.LOADED: + self.test_node_loaded.set_result(mac) + else: + self.test_node_loaded.set_exception( + BaseException( + f"Invalid {event} event, expected " + f"{pw_api.NodeEvent.LOADED}" + ) + ) + + async def node_motion_state( + self, + feature: pw_api.NodeFeature, # type: ignore[name-defined] + motion: pw_api.MotionState, # type: ignore[name-defined] + ) -> None: + """Handle motion event callback.""" + if feature == pw_api.NodeFeature.MOTION: + if motion.state: + self.test_motion_on.set_result(motion.state) + else: + self.test_motion_off.set_result(motion.state) + else: + self.test_motion_on.set_exception( + BaseException( + f"Invalid {feature} feature, expected " + + f"{pw_api.NodeFeature.MOTION}" + ) + ) + self.test_motion_off.set_exception( + BaseException( + f"Invalid {feature} feature, expected " + + f"{pw_api.NodeFeature.MOTION}" + ) + ) + + @pytest.mark.asyncio + async def test_stick_node_discovered_subscription( + self, monkeypatch: pytest.MonkeyPatch + ) -> None: + """Testing "new_node" subscription for Scan.""" + mock_serial = MockSerial(None) + monkeypatch.setattr( + pw_connection_manager, + "create_serial_connection", + 
mock_serial.mock_connection, + ) + monkeypatch.setattr(pw_sender, "STICK_TIME_OUT", 0.1) + monkeypatch.setattr(pw_requests, "NODE_TIME_OUT", 0.5) + stick = pw_stick.Stick("test_port", cache_enabled=False) + await stick.connect() + await stick.initialize() + await stick.discover_nodes(load=False) + stick.accept_join_request = True + self.test_node_awake = asyncio.Future() + unsub_awake = stick.subscribe_to_node_events( + node_event_callback=self.node_awake, + events=(pw_api.NodeEvent.AWAKE,), + ) + + # Inject NodeAwakeResponse message to trigger a 'node discovered' event + mock_serial.inject_message(b"004F555555555555555500", b"FFFE") + mac_awake_node = await self.test_node_awake + assert mac_awake_node == "5555555555555555" + unsub_awake() + + assert await stick.nodes["5555555555555555"].load() + assert stick.nodes["5555555555555555"].node_info.firmware == dt( + 2011, 6, 27, 8, 55, 44, tzinfo=UTC + ) + assert stick.nodes["5555555555555555"].node_info.version == "080007" + assert stick.nodes["5555555555555555"].node_info.model == "Scan" + assert stick.nodes["5555555555555555"].node_info.model_type == None + assert stick.nodes["5555555555555555"].available + assert stick.nodes["5555555555555555"].node_info.is_battery_powered + assert sorted(stick.nodes["5555555555555555"].features) == sorted( + ( + pw_api.NodeFeature.AVAILABLE, + pw_api.NodeFeature.BATTERY, + pw_api.NodeFeature.INFO, + pw_api.NodeFeature.PING, + pw_api.NodeFeature.MOTION, + pw_api.NodeFeature.MOTION_CONFIG, + ) + ) + + # Check Scan is raising NodeError for unsupported features + with pytest.raises(pw_exceptions.FeatureError): + assert stick.nodes["5555555555555555"].relay + with pytest.raises(pw_exceptions.FeatureError): + assert stick.nodes["5555555555555555"].relay_state + with pytest.raises(pw_exceptions.FeatureError): + assert stick.nodes["5555555555555555"].switch + with pytest.raises(pw_exceptions.FeatureError): + assert stick.nodes["5555555555555555"].power + with pytest.raises(pw_exceptions.FeatureError): + assert stick.nodes["5555555555555555"].humidity + with pytest.raises(pw_exceptions.FeatureError): + assert stick.nodes["5555555555555555"].temperature + with pytest.raises(pw_exceptions.FeatureError): + assert stick.nodes["5555555555555555"].energy + + # Motion + self.test_motion_on = asyncio.Future() + self.test_motion_off = asyncio.Future() + unsub_motion = stick.nodes["5555555555555555"].subscribe_to_feature_update( + node_feature_callback=self.node_motion_state, + features=(pw_api.NodeFeature.MOTION,), + ) + # Inject motion message to trigger a 'motion on' event + mock_serial.inject_message(b"005655555555555555550001", b"FFFF") + motion_on = await self.test_motion_on + assert motion_on + assert stick.nodes["5555555555555555"].motion + + # Inject motion message to trigger a 'motion off' event + mock_serial.inject_message(b"005655555555555555550000", b"FFFF") + motion_off = await self.test_motion_off + assert not motion_off + assert not stick.nodes["5555555555555555"].motion + unsub_motion() + + await stick.disconnect() + + async def node_join(self, event: pw_api.NodeEvent, mac: str) -> None: # type: ignore[name-defined] + """Handle join event callback.""" + if event == pw_api.NodeEvent.JOIN: + self.test_node_join.set_result(mac) + else: + self.test_node_join.set_exception( + BaseException( + f"Invalid {event} event, expected " + f"{pw_api.NodeEvent.JOIN}" + ) + ) + + @pytest.mark.asyncio + async def test_stick_node_join_subscription( + self, monkeypatch: pytest.MonkeyPatch + ) -> None: + """Testing "new_node" 
subscription.""" + mock_serial = MockSerial(None) + monkeypatch.setattr( + pw_connection_manager, + "create_serial_connection", + mock_serial.mock_connection, + ) + monkeypatch.setattr(pw_sender, "STICK_TIME_OUT", 0.1) + monkeypatch.setattr(pw_requests, "NODE_TIME_OUT", 0.5) + stick = pw_stick.Stick("test_port", cache_enabled=False) + await stick.connect() + await stick.initialize() + await stick.discover_nodes(load=False) + self.test_node_join = asyncio.Future() + unusb_join = stick.subscribe_to_node_events( + node_event_callback=self.node_join, + events=(pw_api.NodeEvent.JOIN,), + ) + + # Inject node join request message + mock_serial.inject_message(b"00069999999999999999", b"FFFC") + mac_join_node = await self.test_node_join + assert mac_join_node == "9999999999999999" + unusb_join() + await stick.disconnect() + + @pytest.mark.asyncio + async def test_node_discovery(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Testing discovery of nodes.""" + mock_serial = MockSerial(None) + monkeypatch.setattr( + pw_connection_manager, + "create_serial_connection", + mock_serial.mock_connection, + ) + monkeypatch.setattr(pw_sender, "STICK_TIME_OUT", 0.2) + monkeypatch.setattr(pw_requests, "NODE_TIME_OUT", 2.0) + stick = pw_stick.Stick("test_port", cache_enabled=False) + await stick.connect() + await stick.initialize() + await stick.discover_nodes(load=False) + assert stick.joined_nodes == 11 + assert stick.nodes.get("0098765432101234") is not None + assert len(stick.nodes) == 6 # Discovered nodes + await stick.disconnect() + + async def node_relay_state( + self, + feature: pw_api.NodeFeature, # type: ignore[name-defined] + state: pw_api.RelayState, # type: ignore[name-defined] + ) -> None: + """Handle relay event callback.""" + if feature == pw_api.NodeFeature.RELAY: + if state.state: + self.test_relay_state_on.set_result(state.state) + else: + self.test_relay_state_off.set_result(state.state) + else: + self.test_relay_state_on.set_exception( + BaseException( + f"Invalid {feature} feature, expected " + + f"{pw_api.NodeFeature.RELAY}" + ) + ) + self.test_relay_state_off.set_exception( + BaseException( + f"Invalid {feature} feature, expected " + + f"{pw_api.NodeFeature.RELAY}" + ) + ) + + async def node_init_relay_state( + self, + feature: pw_api.NodeFeature, # type: ignore[name-defined] + config: pw_api.RelayConfig, # type: ignore[name-defined] + ) -> None: + """Relay Callback for event.""" + if feature == pw_api.NodeFeature.RELAY_INIT: + if config.init_state: + self.test_init_relay_state_on.set_result(config.init_state) + else: + self.test_init_relay_state_off.set_result(config.init_state) + else: + self.test_init_relay_state_on.set_exception( + BaseException( + f"Invalid {feature} feature, expected " + + f"{pw_api.NodeFeature.RELAY_INIT}" + ) + ) + self.test_init_relay_state_off.set_exception( + BaseException( + f"Invalid {feature} feature, expected " + + f"{pw_api.NodeFeature.RELAY_INIT}" + ) + ) + + @pytest.mark.asyncio + async def test_node_relay_and_power(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Testing discovery of nodes.""" + mock_serial = MockSerial(None) + monkeypatch.setattr( + pw_connection_manager, + "create_serial_connection", + mock_serial.mock_connection, + ) + monkeypatch.setattr(pw_sender, "STICK_TIME_OUT", 0.2) + monkeypatch.setattr(pw_requests, "NODE_TIME_OUT", 2.0) + stick = pw_stick.Stick("test_port", cache_enabled=False) + await stick.connect() + await stick.initialize() + await stick.discover_nodes(load=False) + + # Validate if NodeError is raised when device 
is not loaded + with pytest.raises(pw_exceptions.NodeError): + await stick.nodes["0098765432101234"].set_relay(True) + + # Manually load node + assert await stick.nodes["0098765432101234"].load() + + unsub_relay = stick.nodes["0098765432101234"].subscribe_to_feature_update( + node_feature_callback=self.node_relay_state, + features=(pw_api.NodeFeature.RELAY,), + ) + + # Test async switching back from on to off + self.test_relay_state_off = asyncio.Future() + assert not await stick.nodes["0098765432101234"].set_relay(False) + assert not await self.test_relay_state_off + assert not stick.nodes["0098765432101234"].relay + + # Test async switching back from off to on + self.test_relay_state_on = asyncio.Future() + assert await stick.nodes["0098765432101234"].set_relay(True) + assert await self.test_relay_state_on + assert stick.nodes["0098765432101234"].relay + + # Test async switching back from on to off + self.test_relay_state_off = asyncio.Future() + await stick.nodes["0098765432101234"].relay_off() + assert not await self.test_relay_state_off + assert not stick.nodes["0098765432101234"].relay + assert not stick.nodes["0098765432101234"].relay_state.state + + # Test async switching back from off to on + self.test_relay_state_on = asyncio.Future() + await stick.nodes["0098765432101234"].relay_on() + assert await self.test_relay_state_on + assert stick.nodes["0098765432101234"].relay + assert stick.nodes["0098765432101234"].relay_state.state + + unsub_relay() + + # Check if node is online + assert await stick.nodes["0098765432101234"].is_online() + + # Test non-support relay configuration + with pytest.raises(pw_exceptions.FeatureError): + assert stick.nodes["0098765432101234"].relay_config is not None + with pytest.raises(pw_exceptions.FeatureError): + await stick.nodes["0098765432101234"].set_relay_init(True) + with pytest.raises(pw_exceptions.FeatureError): + await stick.nodes["0098765432101234"].set_relay_init(False) + + # Check Circle is raising NodeError for unsupported features + with pytest.raises(pw_exceptions.FeatureError): + assert stick.nodes["0098765432101234"].motion + with pytest.raises(pw_exceptions.FeatureError): + assert stick.nodes["0098765432101234"].switch + with pytest.raises(pw_exceptions.FeatureError): + assert stick.nodes["0098765432101234"].humidity + with pytest.raises(pw_exceptions.FeatureError): + assert stick.nodes["0098765432101234"].temperature + + # Test relay init + # load node 2222222222222222 which has + # the firmware with init relay feature + + # Validate if NodeError is raised when device is not loaded + with pytest.raises(pw_exceptions.NodeError): + await stick.nodes["2222222222222222"].set_relay_init(True) + + assert await stick.nodes["2222222222222222"].load() + self.test_init_relay_state_on = asyncio.Future() + self.test_init_relay_state_off = asyncio.Future() + unsub_inti_relay = stick.nodes["2222222222222222"].subscribe_to_feature_update( + node_feature_callback=self.node_init_relay_state, + features=(pw_api.NodeFeature.RELAY_INIT,), + ) + + # Test async switching back init_state from on to off + assert stick.nodes["2222222222222222"].relay_config.init_state + self.test_init_relay_state_off = asyncio.Future() + assert not await stick.nodes["2222222222222222"].set_relay_init(False) + assert not await self.test_init_relay_state_off + assert not stick.nodes["2222222222222222"].relay_config.init_state + + # Test async switching back from off to on + self.test_init_relay_state_on = asyncio.Future() + assert await 
stick.nodes["2222222222222222"].set_relay_init(True) + assert await self.test_init_relay_state_on + assert stick.nodes["2222222222222222"].relay_config.init_state + + unsub_inti_relay() + + await stick.disconnect() + + @pytest.mark.asyncio + async def test_energy_circle(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Testing energy retrieval.""" + mock_serial = MockSerial(None) + monkeypatch.setattr( + pw_connection_manager, + "create_serial_connection", + mock_serial.mock_connection, + ) + monkeypatch.setattr(pw_energy_pulses, "MAX_LOG_HOURS", 25) + monkeypatch.setattr(pw_sender, "STICK_TIME_OUT", 0.2) + monkeypatch.setattr(pw_requests, "NODE_TIME_OUT", 2.0) + + async def fake_get_missing_energy_logs(address: int) -> None: + """Mock missing energy logs.""" + + monkeypatch.setattr( + pw_circle.PlugwiseCircle, + "get_missing_energy_logs", + fake_get_missing_energy_logs, + ) + stick = pw_stick.Stick("test_port", cache_enabled=False) + await stick.connect() + await stick.initialize() + await stick.discover_nodes(load=False) + + # Check calibration in unloaded state + assert not stick.nodes["0098765432101234"].calibrated + + # Manually load node + assert await stick.nodes["0098765432101234"].load() + + # Check calibration in loaded state + assert stick.nodes["0098765432101234"].calibrated + + # Test power state without request + assert stick.nodes["0098765432101234"].power == pw_api.PowerStatistics( + last_second=None, last_8_seconds=None, timestamp=None + ) + pu = await stick.nodes["0098765432101234"].power_update() + assert pu.last_second == 21.2780505980402 + assert pu.last_8_seconds == -27.150578775440106 + + # Test energy state without request + assert stick.nodes["0098765432101234"].energy == pw_api.EnergyStatistics( + log_interval_consumption=None, + log_interval_production=None, + hour_consumption=None, + hour_consumption_reset=None, + day_consumption=None, + day_consumption_reset=None, + week_consumption=None, + week_consumption_reset=None, + hour_production=None, + hour_production_reset=None, + day_production=None, + day_production_reset=None, + week_production=None, + week_production_reset=None, + ) + # energy_update is not complete and should return none + utc_now = dt.now(UTC) + assert await stick.nodes["0098765432101234"].energy_update() is None + # Allow for background task to finish + + assert stick.nodes["0098765432101234"].energy == pw_api.EnergyStatistics( + log_interval_consumption=60, + log_interval_production=None, + hour_consumption=0.0026868922443345974, + hour_consumption_reset=utc_now.replace(minute=0, second=0, microsecond=0), + day_consumption=None, + day_consumption_reset=None, + week_consumption=None, + week_consumption_reset=None, + hour_production=None, + hour_production_reset=None, + day_production=None, + day_production_reset=None, + week_production=None, + week_production_reset=None, + ) + await stick.disconnect() + + @freeze_time(dt.now()) + def test_pulse_collection_consumption( + self, monkeypatch: pytest.MonkeyPatch + ) -> None: + """Testing pulse collection class.""" + monkeypatch.setattr(pw_energy_pulses, "MAX_LOG_HOURS", 24) + + fixed_timestamp_utc = dt.now(UTC) + fixed_this_hour = fixed_timestamp_utc.replace(minute=0, second=0, microsecond=0) + + # Test consumption logs + tst_consumption = pw_energy_pulses.PulseCollection(mac="0098765432101234") + assert tst_consumption.log_addresses_missing is None + assert tst_consumption.production_logging == False # is None + + # Test consumption - Log import #1 + # No missing addresses yet + test_timestamp = 
fixed_this_hour + tst_consumption.add_log(100, 1, test_timestamp, 1000) + assert tst_consumption.log_interval_consumption is None + assert tst_consumption.log_interval_production is None + assert not tst_consumption.production_logging # is None + assert tst_consumption.collected_pulses( + test_timestamp, is_consumption=True + ) == (None, None) + assert tst_consumption.log_addresses_missing is None + + # Test consumption - Log import #2, random log + # No missing addresses yet + # return intermediate missing addresses + test_timestamp = fixed_this_hour - td(hours=17) + tst_consumption.add_log(95, 4, test_timestamp, 1000) + assert tst_consumption.log_interval_consumption is None + assert tst_consumption.log_interval_production is None + assert not tst_consumption.production_logging # is None + assert tst_consumption.collected_pulses( + test_timestamp, is_consumption=True + ) == (None, None) + assert tst_consumption.log_addresses_missing == [99, 98, 97, 96] + + # Test consumption - Log import #3 + # log next to existing with different timestamp + # so 'production logging' should be marked as False now + test_timestamp = fixed_this_hour - td(hours=18) + tst_consumption.add_log(95, 3, test_timestamp, 1000) + assert tst_consumption.log_interval_consumption is None + assert tst_consumption.log_interval_production is None + assert not tst_consumption.production_logging + assert tst_consumption.collected_pulses( + test_timestamp, is_consumption=True + ) == (None, None) + assert tst_consumption.log_addresses_missing == [99, 98, 97, 96] + + # Test consumption - Log import #4, no change + test_timestamp = fixed_this_hour - td(hours=19) + tst_consumption.add_log(95, 2, test_timestamp, 1000) + assert tst_consumption.log_interval_consumption is None + assert tst_consumption.log_interval_production is None + assert not tst_consumption.production_logging + assert tst_consumption.collected_pulses( + test_timestamp, is_consumption=True + ) == (None, None) + assert tst_consumption.log_addresses_missing == [99, 98, 97, 96] + + # Test consumption - Log import #5 + # Complete log import for address 95 so it must drop from missing list + test_timestamp = fixed_this_hour - td(hours=20) + tst_consumption.add_log(95, 1, test_timestamp, 1000) + assert tst_consumption.log_interval_consumption is None + assert tst_consumption.log_interval_production is None + assert not tst_consumption.production_logging + assert tst_consumption.log_addresses_missing == [99, 98, 97, 96] + + # Test consumption - Log import #6 + # Add before last log so interval of consumption must be determined + test_timestamp = fixed_this_hour - td(hours=1) + tst_consumption.add_log(99, 4, test_timestamp, 750) + assert tst_consumption.log_interval_consumption == 60 + assert tst_consumption.log_interval_production is None + assert not tst_consumption.production_logging + assert tst_consumption.log_addresses_missing == [99, 98, 97, 96] + assert tst_consumption.collected_pulses( + fixed_this_hour, is_consumption=True + ) == (None, None) + + tst_consumption.add_log(99, 3, fixed_this_hour - td(hours=2), 1111) + assert tst_consumption.log_interval_consumption == 60 + assert tst_consumption.log_interval_production is None + assert not tst_consumption.production_logging + assert tst_consumption.log_addresses_missing == [99, 98, 97, 96] + assert tst_consumption.collected_pulses( + fixed_this_hour, is_consumption=True + ) == (None, None) + + # Test consumption - pulse update #1 + pulse_update_1 = fixed_this_hour + td(minutes=5) + 
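# Editor's note (illustrative sketch, not part of the committed diff): the assertions
# below suggest that collected_pulses() adds the live counter value to the pulses of
# every fully imported log hour between the requested timestamp and now, e.g.:
#
#     pulses, ts = tst_consumption.collected_pulses(
#         fixed_this_hour - td(hours=2), is_consumption=True
#     )
#     # expected: 2345 (counter) + 1000 (log at -1 h) + 750 (log at -2 h)
#
# When any hour in that range has no imported log yet, the total cannot be computed
# and (None, None) is returned instead.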
tst_consumption.update_pulse_counter(1234, 0, pulse_update_1) + assert tst_consumption.collected_pulses( + fixed_this_hour, is_consumption=True + ) == (1234, pulse_update_1) + assert tst_consumption.collected_pulses( + fixed_this_hour, is_consumption=False + ) == (None, None) + assert tst_consumption.log_addresses_missing == [99, 98, 97, 96] + + # Test consumption - pulse update #2 + pulse_update_2 = fixed_this_hour + td(minutes=7) + test_timestamp = fixed_this_hour + tst_consumption.update_pulse_counter(2345, 0, pulse_update_2) + assert tst_consumption.collected_pulses( + test_timestamp, is_consumption=True + ) == (2345, pulse_update_2) + assert tst_consumption.collected_pulses( + test_timestamp, is_consumption=False + ) == (None, None) + + # Test consumption - pulses + log (address=100, slot=1) + test_timestamp = fixed_this_hour - td(hours=1) + assert tst_consumption.collected_pulses( + test_timestamp, is_consumption=True + ) == (2345 + 1000, pulse_update_2) + assert tst_consumption.collected_pulses( + test_timestamp, is_consumption=False + ) == (None, None) + assert tst_consumption.log_addresses_missing == [99, 98, 97, 96] + + # Test consumption - pulses + logs (address=100, slot=1 & address=99, slot=4) + test_timestamp = fixed_this_hour - td(hours=2) + assert tst_consumption.collected_pulses( + test_timestamp, is_consumption=True + ) == (2345 + 1000 + 750, pulse_update_2) + assert tst_consumption.collected_pulses( + test_timestamp, is_consumption=False + ) == (None, None) + + # Test consumption - pulses + missing logs + test_timestamp = fixed_this_hour - td(hours=3) + assert tst_consumption.collected_pulses( + test_timestamp, is_consumption=True + ) == (None, None) + assert tst_consumption.collected_pulses( + test_timestamp, is_consumption=False + ) == (None, None) + + assert not tst_consumption.log_rollover + # add missing logs + test_timestamp = fixed_this_hour - td(hours=3) + tst_consumption.add_log(99, 2, (fixed_this_hour - td(hours=3)), 1000) + tst_consumption.add_log(99, 1, (fixed_this_hour - td(hours=4)), 1000) + tst_consumption.add_log(98, 4, (fixed_this_hour - td(hours=5)), 1000) + tst_consumption.add_log(98, 3, (fixed_this_hour - td(hours=6)), 1000) + tst_consumption.add_log(98, 2, (fixed_this_hour - td(hours=7)), 1000) + tst_consumption.add_log(98, 1, (fixed_this_hour - td(hours=8)), 1000) + tst_consumption.add_log(97, 4, (fixed_this_hour - td(hours=9)), 1000) + tst_consumption.add_log(97, 3, (fixed_this_hour - td(hours=10)), 1000) + tst_consumption.add_log(97, 2, (fixed_this_hour - td(hours=11)), 1000) + tst_consumption.add_log(97, 1, (fixed_this_hour - td(hours=12)), 1000) + tst_consumption.add_log(96, 4, (fixed_this_hour - td(hours=13)), 1000) + tst_consumption.add_log(96, 3, (fixed_this_hour - td(hours=14)), 1000) + tst_consumption.add_log(96, 2, (fixed_this_hour - td(hours=15)), 1000) + tst_consumption.add_log(96, 1, (fixed_this_hour - td(hours=16)), 1000) + tst_consumption.add_log(94, 4, (fixed_this_hour - td(hours=21)), 1000) + tst_consumption.add_log(94, 3, (fixed_this_hour - td(hours=22)), 1000) + + # Log 24 (max hours) must be dropped + assert tst_consumption.collected_logs == 23 + tst_consumption.add_log(94, 2, (fixed_this_hour - td(hours=23)), 1000) + assert tst_consumption.collected_logs == 24 + tst_consumption.add_log(94, 1, (fixed_this_hour - td(hours=24)), 1000) + assert tst_consumption.collected_logs == 24 + + # Test rollover by updating pulses before log record + assert not tst_consumption.log_rollover + pulse_update_3 = fixed_this_hour + td(hours=1, 
seconds=3) + tst_consumption.update_pulse_counter(45, 0, pulse_update_3) + assert tst_consumption.log_rollover + test_timestamp = fixed_this_hour + td(hours=1, seconds=5) + assert tst_consumption.collected_pulses( + test_timestamp, is_consumption=True + ) == (None, None) + tst_consumption.add_log(100, 2, (fixed_this_hour + td(hours=1)), 2222) + assert not tst_consumption.log_rollover + assert tst_consumption.collected_pulses( + test_timestamp, is_consumption=True + ) == (45, pulse_update_3) + assert tst_consumption.collected_pulses( + fixed_this_hour, is_consumption=True + ) == (45 + 2222, pulse_update_3) + + # Test log rollover by updating log first before updating pulses + tst_consumption.add_log(100, 3, (fixed_this_hour + td(hours=2)), 3333) + assert tst_consumption.log_rollover + assert tst_consumption.collected_pulses( + fixed_this_hour, is_consumption=True + ) == (None, None) + pulse_update_4 = fixed_this_hour + td(hours=2, seconds=10) + tst_consumption.update_pulse_counter(321, 0, pulse_update_4) + assert not tst_consumption.log_rollover + assert tst_consumption.collected_pulses( + fixed_this_hour, is_consumption=True + ) == (2222 + 3333 + 321, pulse_update_4) + + @freeze_time(dt.now()) + def test_pulse_collection_consumption_empty( + self, monkeypatch: pytest.MonkeyPatch + ) -> None: + """Testing pulse collection class.""" + monkeypatch.setattr(pw_energy_pulses, "MAX_LOG_HOURS", 24) + + fixed_timestamp_utc = dt.now(UTC) + fixed_this_hour = fixed_timestamp_utc.replace(minute=0, second=0, microsecond=0) + + # Import consumption logs + tst_pc = pw_energy_pulses.PulseCollection(mac="0098765432101234") + tst_pc.add_log(100, 1, fixed_this_hour - td(hours=5), 1000) + assert tst_pc.log_addresses_missing is None + tst_pc.add_log(99, 4, fixed_this_hour - td(hours=6), 750) + assert tst_pc.log_addresses_missing == [99, 98, 97, 96, 95] + tst_pc.add_log(99, 3, fixed_this_hour - td(hours=7), 3750) + tst_pc.add_log(99, 2, fixed_this_hour - td(hours=8), 750) + tst_pc.add_log(99, 1, fixed_this_hour - td(hours=9), 2750) + assert tst_pc.log_addresses_missing == [98, 97, 96, 95] + tst_pc.add_log(98, 4, fixed_this_hour - td(hours=10), 1750) + assert tst_pc.log_addresses_missing == [98, 97, 96, 95] + + # test empty log prior + tst_pc.add_empty_log(98, 3) + assert tst_pc.log_addresses_missing == [] + + tst_pc.add_log(100, 2, fixed_this_hour - td(hours=5), 1750) + tst_pc.add_empty_log(100, 3) + assert tst_pc.log_addresses_missing == [] + + tst_pc.add_log(100, 3, fixed_this_hour - td(hours=4), 1750) + assert tst_pc.log_addresses_missing == [] + + tst_pc.add_log(101, 2, fixed_this_hour - td(hours=1), 1234) + assert tst_pc.log_addresses_missing == [101, 100] + + tst_pc.add_log(101, 1, fixed_this_hour - td(hours=1), 1234) + assert tst_pc.log_addresses_missing == [100] + + @freeze_time(dt.now()) + def test_pulse_collection_production(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Testing pulse collection class.""" + + # Set log hours to 1 week + monkeypatch.setattr(pw_energy_pulses, "MAX_LOG_HOURS", 168) + + fixed_timestamp_utc = dt.now(UTC) + fixed_this_hour = fixed_timestamp_utc.replace(minute=0, second=0, microsecond=0) + + # Test consumption and production logs + tst_production = pw_energy_pulses.PulseCollection(mac="0098765432101234") + assert tst_production.log_addresses_missing is None + assert not tst_production.production_logging # is None + + # Test consumption & production - Log import #1 - production + # Missing addresses can not be determined yet + test_timestamp = fixed_this_hour - 
td(hours=1) + tst_production.add_log(200, 2, test_timestamp, 2000) + assert tst_production.log_addresses_missing is None + # assert tst_production.production_logging is None + + # Test consumption & production - Log import #2 - consumption + # production must be enabled & intervals are unknown + # Log at address 200 is known and expect production logs too + test_timestamp = fixed_this_hour - td(hours=1) + tst_production.add_log(200, 1, test_timestamp, 1000) + assert tst_production.log_addresses_missing is None + assert tst_production.log_interval_consumption is None + assert tst_production.log_interval_production is None + # assert tst_production.production_logging + + # Test consumption & production - Log import #3 - production + # Interval of consumption is not yet available + test_timestamp = fixed_this_hour - td(hours=2) # type: ignore[unreachable] + tst_production.add_log(199, 4, test_timestamp, 4000) + missing_check = list(range(199, 157, -1)) + assert tst_production.log_addresses_missing == missing_check + # assert tst_production.log_interval_consumption is None + # assert tst_production.log_interval_production == 60 + # assert tst_production.production_logging + + # Test consumption & production - Log import #4 + # Interval of consumption is available + test_timestamp = fixed_this_hour - td(hours=2) + tst_production.add_log(199, 3, test_timestamp, 3000) + assert tst_production.log_addresses_missing == missing_check + # assert tst_production.log_interval_consumption == 60 + # assert tst_production.log_interval_production == 60 + # assert tst_production.production_logging + + pulse_update_1 = fixed_this_hour + td(minutes=5) + tst_production.update_pulse_counter(100, -50, pulse_update_1) + # assert tst_production.collected_pulses( + # fixed_this_hour, is_consumption=True + # ) == (100, pulse_update_1) + # assert tst_production.collected_pulses( + # fixed_this_hour, is_consumption=False + # ) == (50, pulse_update_1) + # assert tst_production.collected_pulses( + # fixed_this_hour - td(hours=1), is_consumption=True + # ) == (100, pulse_update_1) + # assert tst_production.collected_pulses( + # fixed_this_hour - td(hours=2), is_consumption=True + # ) == (1000 + 100, pulse_update_1) + # assert tst_production.collected_pulses( + # fixed_this_hour - td(hours=1), is_consumption=False + # ) == (50, pulse_update_1) + # assert tst_production.collected_pulses( + # fixed_this_hour - td(hours=2), is_consumption=False + # ) == (2000 + 50, pulse_update_1) + + _pulse_update = 0 + + @freeze_time(dt.now()) + def test_log_address_rollover(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Test log address rollover.""" + + # Set log hours to 25 + monkeypatch.setattr(pw_energy_pulses, "MAX_LOG_HOURS", 24) + + fixed_timestamp_utc = dt.now(UTC) + fixed_this_hour = fixed_timestamp_utc.replace(minute=0, second=0, microsecond=0) + tst_pc = pw_energy_pulses.PulseCollection(mac="0098765432101234") + tst_pc.add_log(2, 1, fixed_this_hour - td(hours=1), 3000) + tst_pc.add_log(1, 4, fixed_this_hour - td(hours=2), 3000) + tst_pc.add_log(1, 3, fixed_this_hour - td(hours=3), 3000) + assert tst_pc.log_addresses_missing == [6015, 6014, 6013, 6012, 1, 0] + + # test + tst_pc = pw_energy_pulses.PulseCollection(mac="0098765432101234") + tst_pc.add_log(2, 4, fixed_this_hour - td(hours=1), 0) # prod + tst_pc.add_log(2, 3, fixed_this_hour - td(hours=1), 23935) # con + tst_pc.add_log(2, 2, fixed_this_hour - td(hours=2), 0) # prod + tst_pc.add_log(2, 1, fixed_this_hour - td(hours=2), 10786) # con + # <-- logs 0 & 1 are missing 
for hours 3, 4, 5 & 6 --> + tst_pc.add_log(6015, 4, fixed_this_hour - td(hours=7), 0) + tst_pc.add_log(6015, 3, fixed_this_hour - td(hours=7), 11709) + tst_pc.add_log(6015, 2, fixed_this_hour - td(hours=8), 0) + tst_pc.add_log(6015, 1, fixed_this_hour - td(hours=8), 10382) + assert tst_pc.log_addresses_missing == [1, 0] + + def pulse_update( + self, timestamp: dt, is_consumption: bool + ) -> tuple[int | None, dt | None]: + """Update pulse helper for energy counter.""" + self._pulse_update += 1 + if self._pulse_update == 1: + return (None, None) + if self._pulse_update == 2: + return (None, timestamp + td(minutes=5)) + if self._pulse_update == 3: + return (2222, None) + if self._pulse_update == 4: + return (2222, timestamp + td(minutes=10)) + return (3333, timestamp + td(minutes=15, seconds=10)) + + @freeze_time(dt.now()) + def test_energy_counter(self) -> None: + """Testing energy counter class.""" + pulse_col_mock = Mock() + pulse_col_mock.collected_pulses.side_effect = self.pulse_update + + fixed_timestamp_utc = dt.now(UTC) + fixed_timestamp_local = dt.now(dt.now(UTC).astimezone().tzinfo) + + _LOGGER.debug( + "test_energy_counter | fixed_timestamp-utc = %s", str(fixed_timestamp_utc) + ) + + calibration_config = pw_energy_calibration.EnergyCalibration(1, 2, 3, 4) + + # Initialize hour counter + energy_counter_init = pw_energy_counter.EnergyCounter( + pw_energy_counter.EnergyType.CONSUMPTION_HOUR, "fake mac" + ) + assert energy_counter_init.calibration is None + energy_counter_init.calibration = calibration_config + + assert energy_counter_init.energy is None + assert energy_counter_init.is_consumption + assert energy_counter_init.last_reset is None + assert energy_counter_init.last_update is None + + # First update (None, None) + assert energy_counter_init.update(pulse_col_mock) == (None, None) + assert energy_counter_init.energy is None + assert energy_counter_init.last_reset is None + assert energy_counter_init.last_update is None + # Second update (None, timestamp) + assert energy_counter_init.update(pulse_col_mock) == (None, None) + assert energy_counter_init.energy is None + assert energy_counter_init.last_reset is None + assert energy_counter_init.last_update is None + # Third update (2222, None) + assert energy_counter_init.update(pulse_col_mock) == (None, None) + assert energy_counter_init.energy is None + assert energy_counter_init.last_reset is None + assert energy_counter_init.last_update is None + + # forth update (2222, timestamp + 00:10:00) + reset_timestamp = fixed_timestamp_local.replace( + minute=0, second=0, microsecond=0 + ) + assert energy_counter_init.update(pulse_col_mock) == ( + 0.07204743061527973, + reset_timestamp, + ) + assert energy_counter_init.energy == 0.07204743061527973 + assert energy_counter_init.last_reset == reset_timestamp + assert energy_counter_init.last_update == reset_timestamp + td(minutes=10) + + # fifth update (3333, timestamp + 00:15:10) + assert energy_counter_init.update(pulse_col_mock) == ( + 0.08263379198066137, + reset_timestamp, + ) + assert energy_counter_init.energy == 0.08263379198066137 + assert energy_counter_init.last_reset == reset_timestamp + assert energy_counter_init.last_update == reset_timestamp + td( + minutes=15, seconds=10 + ) + + # Production hour + energy_counter_p_h = pw_energy_counter.EnergyCounter( + pw_energy_counter.EnergyType.PRODUCTION_HOUR, "fake mac" + ) + assert not energy_counter_p_h.is_consumption + + @pytest.mark.asyncio + async def test_creating_request_messages(self) -> None: + """Test create request 
message.""" + node_network_info_request = pw_requests.StickNetworkInfoRequest( + self.dummy_fn, None + ) + assert node_network_info_request.serialize() == b"\x05\x05\x03\x030001CAAB\r\n" + circle_plus_connect_request = pw_requests.CirclePlusConnectRequest( + self.dummy_fn, b"1111222233334444" + ) + assert ( + circle_plus_connect_request.serialize() + == b"\x05\x05\x03\x030004000000000000000000001111222233334444BDEC\r\n" + ) + node_add_request = pw_requests.NodeAddRequest( + self.dummy_fn, b"1111222233334444", True + ) + assert ( + node_add_request.serialize() + == b"\x05\x05\x03\x0300070111112222333344445578\r\n" + ) + node_reset_request = pw_requests.NodeResetRequest( + self.dummy_fn, b"1111222233334444", 2, 5 + ) + assert ( + node_reset_request.serialize() + == b"\x05\x05\x03\x030009111122223333444402053D5C\r\n" + ) + node_image_activate_request = pw_requests.NodeImageActivateRequest( + self.dummy_fn, b"1111222233334444", 2, 5 + ) + assert ( + node_image_activate_request.serialize() + == b"\x05\x05\x03\x03000F1111222233334444020563AA\r\n" + ) + circle_log_data_request = pw_requests.CircleLogDataRequest( + self.dummy_fn, + b"1111222233334444", + dt(2022, 5, 3, 0, 0, 0), + dt(2022, 5, 10, 23, 0, 0), + ) + assert ( + circle_log_data_request.serialize() + == b"\x05\x05\x03\x030014111122223333444416050B4016053804AD3A\r\n" + ) + node_remove_request = pw_requests.NodeRemoveRequest( + self.dummy_fn, b"1111222233334444", "5555666677778888" + ) + assert ( + node_remove_request.serialize() + == b"\x05\x05\x03\x03001C11112222333344445555666677778888D89C\r\n" + ) + + circle_plus_realtimeclock_request = ( + pw_requests.CirclePlusRealTimeClockSetRequest( + self.dummy_fn, b"1111222233334444", dt(2022, 5, 4, 3, 1, 0) + ) + ) + assert ( + circle_plus_realtimeclock_request.serialize() + == b"\x05\x05\x03\x030028111122223333444400010302040522ADE2\r\n" + ) + + node_sleep_config_request = pw_requests.NodeSleepConfigRequest( + self.dummy_fn, + b"1111222233334444", + 5, # Duration in seconds the SED will be awake for receiving commands + 360, # Duration in minutes the SED will be in sleeping mode and not able to respond any command + 1440, # Interval in minutes the node will wake up and able to receive commands + False, # Enable/disable clock sync + 0, # Duration in minutes the node synchronize its clock + ) + assert ( + node_sleep_config_request.serialize() + == b"\x05\x05\x03\x030050111122223333444405016805A00000008C9D\r\n" + ) + + scan_configure_request = pw_requests.ScanConfigureRequest( + self.dummy_fn, + b"1111222233334444", + 5, # Delay in minutes when signal is send when no motion is detected + 30, # Sensitivity of Motion sensor (High, Medium, Off) + False, # Daylight override to only report motion when lightlevel is below calibrated level + ) + assert ( + scan_configure_request.serialize() + == b"\x05\x05\x03\x03010111112222333344441E0005025E\r\n" + ) + + @pytest.mark.asyncio + async def test_stick_network_down(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Testing timeout Circle + discovery.""" + mock_serial = MockSerial( + { + b"\x05\x05\x03\x03000AB43C\r\n": ( + "STICK INIT", + b"000000C1", # Success ack + b"0011" # msg_id + + b"0123456789012345" # stick mac + + b"00" # unknown1 + + b"00" # network_is_online + + b"0098765432101234" # circle_plus_mac + + b"4321" # network_id + + b"00", # unknown2 + ), + } + ) + monkeypatch.setattr( + pw_connection_manager, + "create_serial_connection", + mock_serial.mock_connection, + ) + monkeypatch.setattr(pw_sender, "STICK_TIME_OUT", 0.2) + 
monkeypatch.setattr(pw_requests, "NODE_TIME_OUT", 1.0) + stick = pw_stick.Stick(port="test_port", cache_enabled=False) + await stick.connect() + with pytest.raises(pw_exceptions.StickError): + await stick.initialize() + await stick.disconnect() + + def fake_env(self, env: str) -> str | None: + """Fake environment.""" + if env == "APPDATA": + return "appdata_folder" + if env == "~": + return "/home/usr" + return None + + def os_path_join(self, str_a: str, str_b: str) -> str: + """Join path.""" + return f"{str_a}/{str_b}" + + @pytest.mark.asyncio + async def test_cache(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Test PlugwiseCache class.""" + monkeypatch.setattr(pw_helpers_cache, "os_name", "nt") + monkeypatch.setattr(pw_helpers_cache, "os_getenv", self.fake_env) + monkeypatch.setattr(pw_helpers_cache, "os_path_expand_user", self.fake_env) + monkeypatch.setattr(pw_helpers_cache, "os_path_join", self.os_path_join) + + async def aiofiles_os_remove(file: str) -> None: + if file == "mock_folder_that_exists/file_that_exists.ext": + return + if file == "mock_folder_that_exists/nodes.cache": + return + if file == "mock_folder_that_exists/0123456789ABCDEF.cache": + return + raise pw_exceptions.CacheError("Invalid file") + + async def makedirs(cache_dir: str, exist_ok: bool) -> None: + if cache_dir == "mock_folder_that_exists": + return + if cache_dir == "non_existing_folder": + return + raise pw_exceptions.CacheError("wrong folder to create") + + monkeypatch.setattr(pw_helpers_cache, "aiofiles_os_remove", aiofiles_os_remove) + monkeypatch.setattr(pw_helpers_cache, "makedirs", makedirs) + monkeypatch.setattr(pw_helpers_cache, "ospath", MockOsPath()) + + pw_cache = pw_helpers_cache.PlugwiseCache("test-file", "non_existing_folder") + assert not pw_cache.initialized + assert pw_cache.cache_root_directory == "non_existing_folder" + with pytest.raises(pw_exceptions.CacheError): + await pw_cache.initialize_cache() + assert not pw_cache.initialized + + # test create folder + await pw_cache.initialize_cache(create_root_folder=True) + assert pw_cache.initialized + + # Windows + pw_cache = pw_helpers_cache.PlugwiseCache( + "file_that_exists.ext", "mock_folder_that_exists" + ) + pw_cache.cache_root_directory = "mock_folder_that_exists" + assert not pw_cache.initialized + + # Test raising CacheError when cache is not initialized yet + with pytest.raises(pw_exceptions.CacheError): + await pw_cache.read_cache() + await pw_cache.write_cache({"key1": "value z"}) + + await pw_cache.initialize_cache() + assert pw_cache.initialized + + # Mock reading + mock_read_data = [ + "key1;value a\n", + "key2;first duplicate is ignored\n\r", + "key2;value b|value c\n\r", + "key3;value d \r\n", + ] + file_chunks_iter = iter(mock_read_data) + mock_file_stream = MagicMock(readlines=lambda *args, **kwargs: file_chunks_iter) + with patch("aiofiles.threadpool.sync_open", return_value=mock_file_stream): + assert await pw_cache.read_cache() == { + "key1": "value a", + "key2": "value b|value c", + "key3": "value d", + } + file_chunks_iter = iter(mock_read_data) + mock_file_stream = MagicMock(readlines=lambda *args, **kwargs: file_chunks_iter) + with patch("aiofiles.threadpool.sync_open", return_value=mock_file_stream): + await pw_cache.write_cache({"key1": "value z"}) + mock_file_stream.writelines.assert_called_with( + ["key1;value z\n", "key2;value b|value c\n", "key3;value d\n"] + ) + + file_chunks_iter = iter(mock_read_data) + mock_file_stream = MagicMock(readlines=lambda *args, **kwargs: file_chunks_iter) + with 
patch("aiofiles.threadpool.sync_open", return_value=mock_file_stream): + await pw_cache.write_cache({"key4": "value e"}, rewrite=True) + mock_file_stream.writelines.assert_called_with( + [ + "key4;value e\n", + ] + ) + + monkeypatch.setattr(pw_helpers_cache, "os_name", "linux") + pw_cache = pw_helpers_cache.PlugwiseCache( + "file_that_exists.ext", "mock_folder_that_exists" + ) + pw_cache.cache_root_directory = "mock_folder_that_exists" + assert not pw_cache.initialized + await pw_cache.initialize_cache() + assert pw_cache.initialized + await pw_cache.delete_cache() + pw_cache.cache_root_directory = "mock_folder_that_does_not_exists" + await pw_cache.delete_cache() + pw_cache = pw_helpers_cache.PlugwiseCache( + "file_that_exists.ext", "mock_folder_that_does_not_exists" + ) + await pw_cache.delete_cache() + + @pytest.mark.asyncio + async def test_network_cache(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Test NetworkRegistrationCache class.""" + monkeypatch.setattr(pw_helpers_cache, "os_name", "nt") + monkeypatch.setattr(pw_helpers_cache, "os_getenv", self.fake_env) + monkeypatch.setattr(pw_helpers_cache, "os_path_expand_user", self.fake_env) + monkeypatch.setattr(pw_helpers_cache, "os_path_join", self.os_path_join) + + async def aiofiles_os_remove(file: str) -> None: + if file == "mock_folder_that_exists/file_that_exists.ext": + return + if file == "mock_folder_that_exists/nodes.cache": + return + if file == "mock_folder_that_exists/0123456789ABCDEF.cache": + return + raise pw_exceptions.CacheError("Invalid file") + + async def makedirs(cache_dir: str, exist_ok: bool) -> None: + if cache_dir == "mock_folder_that_exists": + return + if cache_dir == "non_existing_folder": + return + raise pw_exceptions.CacheError("wrong folder to create") + + monkeypatch.setattr(pw_helpers_cache, "aiofiles_os_remove", aiofiles_os_remove) + monkeypatch.setattr(pw_helpers_cache, "makedirs", makedirs) + monkeypatch.setattr(pw_helpers_cache, "ospath", MockOsPath()) + + pw_nw_cache = pw_network_cache.NetworkRegistrationCache( + "mock_folder_that_exists" + ) + await pw_nw_cache.initialize_cache() + # test with invalid data + mock_read_data = [ + "-1;0123456789ABCDEF;NodeType.CIRCLE_PLUS", + "0;FEDCBA9876543210xxxNodeType.CIRCLE", + "invalid129834765AFBECD|NodeType.CIRCLE", + ] + file_chunks_iter = iter(mock_read_data) + mock_file_stream = MagicMock(readlines=lambda *args, **kwargs: file_chunks_iter) + with patch("aiofiles.threadpool.sync_open", return_value=mock_file_stream): + await pw_nw_cache.restore_cache() + assert pw_nw_cache.registrations == { + -1: ("0123456789ABCDEF", pw_api.NodeType.CIRCLE_PLUS), + } + + # test with valid data + mock_read_data = [ + "-1;0123456789ABCDEF;NodeType.CIRCLE_PLUS", + "0;FEDCBA9876543210;NodeType.CIRCLE", + "1;1298347650AFBECD;NodeType.SCAN", + "2;;", + ] + file_chunks_iter = iter(mock_read_data) + mock_file_stream = MagicMock(readlines=lambda *args, **kwargs: file_chunks_iter) + with patch("aiofiles.threadpool.sync_open", return_value=mock_file_stream): + await pw_nw_cache.restore_cache() + assert pw_nw_cache.registrations == { + -1: ("0123456789ABCDEF", pw_api.NodeType.CIRCLE_PLUS), + 0: ("FEDCBA9876543210", pw_api.NodeType.CIRCLE), + 1: ("1298347650AFBECD", pw_api.NodeType.SCAN), + 2: ("", None), + } + pw_nw_cache.update_registration(3, "1234ABCD4321FEDC", pw_api.NodeType.STEALTH) + + with patch("aiofiles.threadpool.sync_open", return_value=mock_file_stream): + await pw_nw_cache.save_cache() + mock_file_stream.writelines.assert_called_with( + [ + 
"-1;0123456789ABCDEF|NodeType.CIRCLE_PLUS\n", + "0;FEDCBA9876543210|NodeType.CIRCLE\n", + "1;1298347650AFBECD|NodeType.SCAN\n", + "2;|\n", + "3;1234ABCD4321FEDC|NodeType.STEALTH\n", + "4;|\n", + ] + + [f"{address};|\n" for address in range(5, 64)] + ) + assert pw_nw_cache.registrations == { + -1: ("0123456789ABCDEF", pw_api.NodeType.CIRCLE_PLUS), + 0: ("FEDCBA9876543210", pw_api.NodeType.CIRCLE), + 1: ("1298347650AFBECD", pw_api.NodeType.SCAN), + 2: ("", None), + 3: ("1234ABCD4321FEDC", pw_api.NodeType.STEALTH), + } + + @pytest.mark.asyncio + async def test_node_cache(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Test NodeCache class.""" + monkeypatch.setattr(pw_helpers_cache, "ospath", MockOsPath()) + monkeypatch.setattr(pw_helpers_cache, "os_name", "nt") + monkeypatch.setattr(pw_helpers_cache, "os_getenv", self.fake_env) + monkeypatch.setattr(pw_helpers_cache, "os_path_expand_user", self.fake_env) + monkeypatch.setattr(pw_helpers_cache, "os_path_join", self.os_path_join) + + node_cache = pw_node_cache.NodeCache( + "0123456789ABCDEF", "mock_folder_that_exists" + ) + await node_cache.initialize_cache() + # test with invalid data + mock_read_data = [ + "firmware;2011-6-27-8-52-18", + "hardware;000004400107", + "node_info_timestamp;2024-3-18-19-30-28", + "node_type;2", + "relay;True", + "current_log_address;127", + "calibration_gain_a;0.9903987646102905", + "calibration_gain_b;-1.8206795857622637e-06", + "calibration_noise;0.0", + "calibration_tot;0.023882506415247917", + "energy_collection;102:4:2024-3-14-19-0-0:47|102:3:2024-3-14-18-0-0:48|102:2:2024-3-14-17-0-0:45", + ] + file_chunks_iter = iter(mock_read_data) + mock_file_stream = MagicMock(readlines=lambda *args, **kwargs: file_chunks_iter) + with patch("aiofiles.threadpool.sync_open", return_value=mock_file_stream): + await node_cache.restore_cache() + assert node_cache.states == { + "firmware": "2011-6-27-8-52-18", + "hardware": "000004400107", + "node_info_timestamp": "2024-3-18-19-30-28", + "node_type": "2", + "relay": "True", + "current_log_address": "127", + "calibration_gain_a": "0.9903987646102905", + "calibration_gain_b": "-1.8206795857622637e-06", + "calibration_noise": "0.0", + "calibration_tot": "0.023882506415247917", + "energy_collection": "102:4:2024-3-14-19-0-0:47|102:3:2024-3-14-18-0-0:48|102:2:2024-3-14-17-0-0:45", + } + assert node_cache.get_state("hardware") == "000004400107" + node_cache.update_state("current_log_address", "128") + assert node_cache.get_state("current_log_address") == "128" + node_cache.remove_state("calibration_gain_a") + assert node_cache.get_state("calibration_gain_a") is None + + with patch("aiofiles.threadpool.sync_open", return_value=mock_file_stream): + await node_cache.save_cache() + mock_file_stream.writelines.assert_called_with( + [ + "firmware;2011-6-27-8-52-18\n", + "hardware;000004400107\n", + "node_info_timestamp;2024-3-18-19-30-28\n", + "node_type;2\n", + "relay;True\n", + "current_log_address;128\n", + "calibration_gain_b;-1.8206795857622637e-06\n", + "calibration_noise;0.0\n", + "calibration_tot;0.023882506415247917\n", + "energy_collection;102:4:2024-3-14-19-0-0:47|102:3:2024-3-14-18-0-0:48|102:2:2024-3-14-17-0-0:45\n", + ] + ) + + @pytest.mark.asyncio + async def test_base_node(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Testing properties of base node.""" + + mock_stick_controller = MockStickController() + + async def load_callback(event: pw_api.NodeEvent, mac: str) -> None: # type: ignore[name-defined] + """Load callback for event.""" + + test_node = 
pw_sed.PlugwiseBaseNode( + "1298347650AFBECD", 1, mock_stick_controller, load_callback + ) + + # Validate base node properties which are always set + assert not test_node.is_battery_powered + + # Validate to raise exception when node is not yet loaded + with pytest.raises(pw_exceptions.NodeError): + assert await test_node.set_awake_duration(5) is not None + + with pytest.raises(pw_exceptions.NodeError): + assert test_node.battery_config is not None + + with pytest.raises(pw_exceptions.NodeError): + assert await test_node.set_clock_interval(5) is not None + + with pytest.raises(pw_exceptions.NodeError): + assert await test_node.set_clock_sync(False) is not None + + with pytest.raises(pw_exceptions.NodeError): + assert await test_node.set_sleep_duration(5) is not None + + with pytest.raises(pw_exceptions.NodeError): + assert await test_node.set_motion_daylight_mode(True) is not None + + with pytest.raises(pw_exceptions.NodeError): + assert ( + await test_node.set_motion_sensitivity_level( + pw_api.MotionSensitivity.HIGH + ) + is not None + ) + + with pytest.raises(pw_exceptions.NodeError): + assert await test_node.set_motion_reset_timer(5) is not None + + # Validate to raise NotImplementedError calling load() at basenode + with pytest.raises(NotImplementedError): + await test_node.load() + # Mark test node as loaded + test_node._loaded = True # pylint: disable=protected-access + + # Validate to raise exception when feature is not supported + with pytest.raises(pw_exceptions.FeatureError): + assert await test_node.set_awake_duration(5) is not None + + with pytest.raises(pw_exceptions.FeatureError): + assert test_node.battery_config is not None + + with pytest.raises(pw_exceptions.FeatureError): + assert await test_node.set_clock_interval(5) is not None + + with pytest.raises(pw_exceptions.FeatureError): + assert await test_node.set_clock_sync(False) is not None + + with pytest.raises(pw_exceptions.FeatureError): + assert await test_node.set_sleep_duration(5) is not None + + with pytest.raises(pw_exceptions.FeatureError): + assert await test_node.set_motion_daylight_mode(True) is not None + + with pytest.raises(pw_exceptions.FeatureError): + assert ( + await test_node.set_motion_sensitivity_level( + pw_api.MotionSensitivity.HIGH + ) + is not None + ) + + with pytest.raises(pw_exceptions.FeatureError): + assert await test_node.set_motion_reset_timer(5) is not None + + # Add battery feature to test raising not implemented + # for battery related properties + test_node._features += (pw_api.NodeFeature.BATTERY,) # pylint: disable=protected-access + with pytest.raises(NotImplementedError): + assert await test_node.set_awake_duration(5) is not None + + with pytest.raises(NotImplementedError): + assert test_node.battery_config is not None + + with pytest.raises(NotImplementedError): + assert await test_node.set_clock_interval(5) is not None + + with pytest.raises(NotImplementedError): + assert await test_node.set_clock_sync(False) is not None + + with pytest.raises(NotImplementedError): + assert await test_node.set_sleep_duration(5) is not None + + test_node._features += (pw_api.NodeFeature.MOTION,) # pylint: disable=protected-access + with pytest.raises(NotImplementedError): + assert await test_node.set_motion_daylight_mode(True) is not None + with pytest.raises(NotImplementedError): + assert ( + await test_node.set_motion_sensitivity_level( + pw_api.MotionSensitivity.HIGH + ) + is not None + ) + with pytest.raises(NotImplementedError): + assert await test_node.set_motion_reset_timer(5) is not None 
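# Editor's note (summary of the checks above, not part of the committed diff): the
# base-node test exercises what looks like a three-tier error contract:
#   1. before load()            -> NodeError for every configuration call
#   2. loaded, feature missing  -> FeatureError while the feature is absent from _features
#   3. feature flag present     -> NotImplementedError, since PlugwiseBaseNode leaves the
#                                  battery/motion setters to subclasses
# The NodeSED and PlugwiseScan tests that follow provide the concrete implementations
# of these setters.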
+ + assert not test_node.cache_enabled + assert test_node.mac == "1298347650AFBECD" + + @pytest.mark.asyncio + async def test_sed_node(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Testing properties of SED.""" + + def fake_cache(dummy: object, setting: str) -> str | None: + """Fake cache retrieval.""" + if setting == pw_node.CACHE_FIRMWARE: + return "2011-6-27-8-55-44" + if setting == pw_node.CACHE_HARDWARE: + return "080007" + if setting == pw_node.CACHE_NODE_TYPE: + return "6" + if setting == pw_node.CACHE_NODE_INFO_TIMESTAMP: + return "2024-12-7-1-0-0" + if setting == pw_sed.CACHE_AWAKE_DURATION: + return "20" + if setting == pw_sed.CACHE_CLOCK_INTERVAL: + return "12600" + if setting == pw_sed.CACHE_CLOCK_SYNC: + return "True" + if setting == pw_sed.CACHE_MAINTENANCE_INTERVAL: + return "43200" + if setting == pw_sed.CACHE_SLEEP_DURATION: + return "120" + return None + + monkeypatch.setattr(pw_node.PlugwiseBaseNode, "_get_cache", fake_cache) + mock_stick_controller = MockStickController() + + async def load_callback(event: pw_api.NodeEvent, mac: str) -> None: # type: ignore[name-defined] + """Load callback for event.""" + + test_sed = pw_sed.NodeSED( + "1298347650AFBECD", 1, mock_stick_controller, load_callback + ) + assert not test_sed.cache_enabled + + # Validate SED properties raise exception when node is not yet loaded + with pytest.raises(pw_exceptions.NodeError): + assert test_sed.battery_config is not None + + with pytest.raises(pw_exceptions.NodeError): + assert test_sed.battery_config is not None + + with pytest.raises(pw_exceptions.NodeError): + assert await test_sed.set_maintenance_interval(10) + + assert test_sed.node_info.is_battery_powered + assert test_sed.is_battery_powered + assert await test_sed.load() + assert sorted(test_sed.features) == sorted( + ( + pw_api.NodeFeature.AVAILABLE, + pw_api.NodeFeature.BATTERY, + pw_api.NodeFeature.INFO, + pw_api.NodeFeature.PING, + ) + ) + + sed_config_accepted = pw_responses.NodeResponse() + sed_config_accepted.deserialize( + construct_message(b"000000F65555555555555555", b"0000") + ) + sed_config_failed = pw_responses.NodeResponse() + sed_config_failed.deserialize( + construct_message(b"000000F75555555555555555", b"0000") + ) + + # test awake duration + assert test_sed.awake_duration == 10 + assert test_sed.battery_config.awake_duration == 10 + with pytest.raises(ValueError): + assert await test_sed.set_awake_duration(0) + with pytest.raises(ValueError): + assert await test_sed.set_awake_duration(256) + assert not await test_sed.set_awake_duration(10) + assert not test_sed.sed_config_task_scheduled + assert await test_sed.set_awake_duration(15) + assert test_sed.sed_config_task_scheduled + assert test_sed.battery_config.awake_duration == 15 + assert test_sed.awake_duration == 15 + + # Restore to original settings after failed config + awake_response1 = pw_responses.NodeAwakeResponse() + awake_response1.deserialize( + construct_message(b"004F555555555555555500", b"FFFE") + ) + mock_stick_controller.send_response = sed_config_failed + await test_sed._awake_response(awake_response1) # pylint: disable=protected-access + await asyncio.sleep(0.001) # Ensure time for task to be executed + assert not test_sed.sed_config_task_scheduled + assert test_sed.battery_config.awake_duration == 10 + assert test_sed.awake_duration == 10 + + # Successful config + awake_response2 = pw_responses.NodeAwakeResponse() + awake_response2.deserialize( + construct_message(b"004F555555555555555500", b"FFFE") + ) + awake_response2.timestamp = 
awake_response1.timestamp + td( + seconds=pw_sed.AWAKE_RETRY + ) + assert await test_sed.set_awake_duration(15) + assert test_sed.sed_config_task_scheduled + mock_stick_controller.send_response = sed_config_accepted + await test_sed._awake_response(awake_response2) # pylint: disable=protected-access + await asyncio.sleep(0.001) # Ensure time for task to be executed + assert not test_sed.sed_config_task_scheduled + assert test_sed.battery_config.awake_duration == 15 + assert test_sed.awake_duration == 15 + + # test maintenance interval + assert test_sed.maintenance_interval == 60 + assert test_sed.battery_config.maintenance_interval == 60 + with pytest.raises(ValueError): + assert await test_sed.set_maintenance_interval(0) + with pytest.raises(ValueError): + assert await test_sed.set_maintenance_interval(65536) + assert not await test_sed.set_maintenance_interval(60) + assert await test_sed.set_maintenance_interval(30) + assert test_sed.sed_config_task_scheduled + awake_response3 = pw_responses.NodeAwakeResponse() + awake_response3.deserialize( + construct_message(b"004F555555555555555500", b"FFFE") + ) + awake_response3.timestamp = awake_response2.timestamp + td( + seconds=pw_sed.AWAKE_RETRY + ) + await test_sed._awake_response(awake_response3) # pylint: disable=protected-access + await asyncio.sleep(0.001) # Ensure time for task to be executed + assert not test_sed.sed_config_task_scheduled + assert test_sed.battery_config.maintenance_interval == 30 + assert test_sed.maintenance_interval == 30 + + # test clock interval + assert test_sed.clock_interval == 25200 + assert test_sed.battery_config.clock_interval == 25200 + with pytest.raises(ValueError): + assert await test_sed.set_clock_interval(0) + with pytest.raises(ValueError): + assert await test_sed.set_clock_interval(65536) + assert not await test_sed.set_clock_interval(25200) + assert await test_sed.set_clock_interval(12600) + assert test_sed.sed_config_task_scheduled + awake_response4 = pw_responses.NodeAwakeResponse() + awake_response4.deserialize( + construct_message(b"004F555555555555555500", b"FFFE") + ) + awake_response4.timestamp = awake_response3.timestamp + td( + seconds=pw_sed.AWAKE_RETRY + ) + await test_sed._awake_response(awake_response4) # pylint: disable=protected-access + await asyncio.sleep(0.001) # Ensure time for task to be executed + assert not test_sed.sed_config_task_scheduled + assert test_sed.battery_config.clock_interval == 12600 + assert test_sed.clock_interval == 12600 + + # test clock sync + assert not test_sed.clock_sync + assert not test_sed.battery_config.clock_sync + assert not await test_sed.set_clock_sync(False) + assert await test_sed.set_clock_sync(True) + assert test_sed.sed_config_task_scheduled + awake_response5 = pw_responses.NodeAwakeResponse() + awake_response5.deserialize( + construct_message(b"004F555555555555555500", b"FFFE") + ) + awake_response5.timestamp = awake_response4.timestamp + td( + seconds=pw_sed.AWAKE_RETRY + ) + await test_sed._awake_response(awake_response5) # pylint: disable=protected-access + await asyncio.sleep(0.001) # Ensure time for task to be executed + assert not test_sed.sed_config_task_scheduled + assert test_sed.battery_config.clock_sync + assert test_sed.clock_sync + + # test sleep duration + assert test_sed.sleep_duration == 60 + assert test_sed.battery_config.sleep_duration == 60 + with pytest.raises(ValueError): + assert await test_sed.set_sleep_duration(0) + with pytest.raises(ValueError): + assert await test_sed.set_sleep_duration(65536) + assert not await 
test_sed.set_sleep_duration(60) + assert await test_sed.set_sleep_duration(120) + assert test_sed.sed_config_task_scheduled + awake_response6 = pw_responses.NodeAwakeResponse() + awake_response6.deserialize( + construct_message(b"004F555555555555555500", b"FFFE") + ) + awake_response6.timestamp = awake_response5.timestamp + td( + seconds=pw_sed.AWAKE_RETRY + ) + await test_sed._awake_response(awake_response6) # pylint: disable=protected-access + await asyncio.sleep(0.001) # Ensure time for task to be executed + assert not test_sed.sed_config_task_scheduled + assert test_sed.battery_config.sleep_duration == 120 + assert test_sed.sleep_duration == 120 + + @pytest.mark.asyncio + async def test_scan_node(self, monkeypatch: pytest.MonkeyPatch) -> None: + """Testing properties of scan.""" + + def fake_cache(dummy: object, setting: str) -> str | None: + """Fake cache retrieval.""" + if setting == pw_node.CACHE_FIRMWARE: + return "2011-6-27-8-55-44" + if setting == pw_node.CACHE_HARDWARE: + return "080007" + if setting == pw_node.CACHE_NODE_TYPE: + return "6" + if setting == pw_node.CACHE_NODE_INFO_TIMESTAMP: + return "2024-12-7-1-0-0" + if setting == pw_sed.CACHE_AWAKE_DURATION: + return "20" + if setting == pw_sed.CACHE_CLOCK_INTERVAL: + return "12600" + if setting == pw_sed.CACHE_CLOCK_SYNC: + return "True" + if setting == pw_sed.CACHE_MAINTENANCE_INTERVAL: + return "43200" + if setting == pw_sed.CACHE_SLEEP_DURATION: + return "120" + if setting == pw_scan.CACHE_MOTION_STATE: + return "False" + if setting == pw_scan.CACHE_MOTION_TIMESTAMP: + return "2024-12-6-1-0-0" + if setting == pw_scan.CACHE_MOTION_RESET_TIMER: + return "10" + if setting == pw_scan.CACHE_SCAN_SENSITIVITY: + return "MEDIUM" + if setting == pw_scan.CACHE_SCAN_DAYLIGHT_MODE: + return "True" + return None + + monkeypatch.setattr(pw_node.PlugwiseBaseNode, "_get_cache", fake_cache) + mock_stick_controller = MockStickController() + + scan_config_accepted = pw_responses.NodeAckResponse() + scan_config_accepted.deserialize( + construct_message(b"0100555555555555555500BE", b"0000") + ) + scan_config_failed = pw_responses.NodeAckResponse() + scan_config_failed.deserialize( + construct_message(b"0100555555555555555500BF", b"0000") + ) + + async def load_callback(event: pw_api.NodeEvent, mac: str) -> None: # type: ignore[name-defined] + """Load callback for event.""" + + test_scan = pw_scan.PlugwiseScan( + "1298347650AFBECD", 1, mock_stick_controller, load_callback + ) + assert not test_scan.cache_enabled + + await test_scan.update_node_details( + firmware=dt(2011, 6, 27, 8, 55, 44, tzinfo=UTC), + hardware="080007", + node_type=None, + timestamp=None, + relay_state=None, + logaddress_pointer=None, + ) + assert await test_scan.load() + + # test motion reset timer + assert test_scan.reset_timer == 10 + assert test_scan.motion_config.reset_timer == 10 + with pytest.raises(ValueError): + assert await test_scan.set_motion_reset_timer(0) + with pytest.raises(ValueError): + assert await test_scan.set_motion_reset_timer(256) + assert not await test_scan.set_motion_reset_timer(10) + assert not test_scan.scan_config_task_scheduled + assert await test_scan.set_motion_reset_timer(15) + assert test_scan.scan_config_task_scheduled + assert test_scan.reset_timer == 15 + assert test_scan.motion_config.reset_timer == 15 + + # Restore to original settings after failed config + awake_response1 = pw_responses.NodeAwakeResponse() + awake_response1.deserialize( + construct_message(b"004F555555555555555500", b"FFFE") + ) + mock_stick_controller.send_response 
+        await test_scan._awake_response(awake_response1)  # pylint: disable=protected-access
+        await asyncio.sleep(0.001)  # Ensure time for task to be executed
+        assert not test_scan.scan_config_task_scheduled
+
+        # Successful config
+        awake_response2 = pw_responses.NodeAwakeResponse()
+        awake_response2.deserialize(
+            construct_message(b"004F555555555555555500", b"FFFE")
+        )
+        awake_response2.timestamp = awake_response1.timestamp + td(
+            seconds=pw_sed.AWAKE_RETRY
+        )
+        mock_stick_controller.send_response = scan_config_accepted
+        assert await test_scan.set_motion_reset_timer(25)
+        assert test_scan.scan_config_task_scheduled
+        await test_scan._awake_response(awake_response2)  # pylint: disable=protected-access
+        await asyncio.sleep(0.001)  # Ensure time for task to be executed
+        assert not test_scan.scan_config_task_scheduled
+        assert test_scan.reset_timer == 25
+        assert test_scan.motion_config.reset_timer == 25
+
+        # test motion daylight mode
+        assert not test_scan.daylight_mode
+        assert not test_scan.motion_config.daylight_mode
+        assert not await test_scan.set_motion_daylight_mode(False)
+        assert not test_scan.scan_config_task_scheduled
+        assert await test_scan.set_motion_daylight_mode(True)
+        assert test_scan.scan_config_task_scheduled
+        awake_response3 = pw_responses.NodeAwakeResponse()
+        awake_response3.deserialize(
+            construct_message(b"004F555555555555555500", b"FFFE")
+        )
+        awake_response3.timestamp = awake_response2.timestamp + td(
+            seconds=pw_sed.AWAKE_RETRY
+        )
+        await test_scan._awake_response(awake_response3)  # pylint: disable=protected-access
+        await asyncio.sleep(0.001)  # Ensure time for task to be executed
+        assert not test_scan.scan_config_task_scheduled
+        assert test_scan.daylight_mode
+        assert test_scan.motion_config.daylight_mode
+
+        # test motion sensitivity level
+        assert test_scan.sensitivity_level == pw_api.MotionSensitivity.MEDIUM
+        assert (
+            test_scan.motion_config.sensitivity_level == pw_api.MotionSensitivity.MEDIUM
+        )
+        assert not await test_scan.set_motion_sensitivity_level(
+            pw_api.MotionSensitivity.MEDIUM
+        )
+        assert not test_scan.scan_config_task_scheduled
+        assert await test_scan.set_motion_sensitivity_level(
+            pw_api.MotionSensitivity.HIGH
+        )
+        assert test_scan.scan_config_task_scheduled
+        awake_response4 = pw_responses.NodeAwakeResponse()
+        awake_response4.deserialize(
+            construct_message(b"004F555555555555555500", b"FFFE")
+        )
+        awake_response4.timestamp = awake_response3.timestamp + td(
+            seconds=pw_sed.AWAKE_RETRY
+        )
+        await test_scan._awake_response(awake_response4)  # pylint: disable=protected-access
+        await asyncio.sleep(0.001)  # Ensure time for task to be executed
+        assert not test_scan.scan_config_task_scheduled
+        assert test_scan.sensitivity_level == pw_api.MotionSensitivity.HIGH
+        assert (
+            test_scan.motion_config.sensitivity_level == pw_api.MotionSensitivity.HIGH
+        )
+
+        # scan with cache enabled
+        mock_stick_controller.send_response = None
+        test_scan = pw_scan.PlugwiseScan(
+            "1298347650AFBECD", 1, mock_stick_controller, load_callback
+        )
+        await test_scan.update_node_details(
+            firmware=dt(2011, 6, 27, 8, 55, 44, tzinfo=UTC),
+            hardware="080007",
+            node_type=None,
+            timestamp=None,
+            relay_state=None,
+            logaddress_pointer=None,
+        )
+        test_scan.cache_enabled = True
+        assert await test_scan.load()
+        assert sorted(test_scan.features) == sorted(
+            (
+                pw_api.NodeFeature.AVAILABLE,
+                pw_api.NodeFeature.BATTERY,
+                pw_api.NodeFeature.INFO,
+                pw_api.NodeFeature.MOTION,
+                pw_api.NodeFeature.MOTION_CONFIG,
+                pw_api.NodeFeature.PING,
+            )
+        )
+
+        state = await test_scan.get_state(
+            (
+                pw_api.NodeFeature.AVAILABLE,
+                pw_api.NodeFeature.BATTERY,
+                pw_api.NodeFeature.INFO,
+                pw_api.NodeFeature.MOTION,
+                pw_api.NodeFeature.MOTION_CONFIG,
+            )
+        )
+        assert not state[pw_api.NodeFeature.AVAILABLE].state
+
+    @pytest.mark.asyncio
+    async def test_switch_node(self, monkeypatch: pytest.MonkeyPatch) -> None:
+        """Testing properties of switch."""
+
+        def fake_cache(dummy: object, setting: str) -> str | None:
+            """Fake cache retrieval."""
+            if setting == pw_node.CACHE_FIRMWARE:
+                return "2011-5-13-7-26-54"
+            if setting == pw_node.CACHE_HARDWARE:
+                return "080029"
+            if setting == pw_node.CACHE_NODE_TYPE:
+                return "3"
+            if setting == pw_node.CACHE_NODE_INFO_TIMESTAMP:
+                return "2024-12-7-1-0-0"
+            if setting == pw_sed.CACHE_AWAKE_DURATION:
+                return "15"
+            if setting == pw_sed.CACHE_CLOCK_INTERVAL:
+                return "14600"
+            if setting == pw_sed.CACHE_CLOCK_SYNC:
+                return "False"
+            if setting == pw_sed.CACHE_MAINTENANCE_INTERVAL:
+                return "900"
+            if setting == pw_sed.CACHE_SLEEP_DURATION:
+                return "180"
+            return None
+
+        monkeypatch.setattr(pw_node.PlugwiseBaseNode, "_get_cache", fake_cache)
+        mock_stick_controller = MockStickController()
+
+        async def load_callback(event: pw_api.NodeEvent, mac: str) -> None:  # type: ignore[name-defined]
+            """Load callback for event."""
+
+        test_switch = pw_switch.PlugwiseSwitch(
+            "1298347650AFBECD", 1, mock_stick_controller, load_callback
+        )
+        assert not test_switch.cache_enabled
+
+        assert sorted(test_switch.features) == sorted(
+            (
+                pw_api.NodeFeature.AVAILABLE,
+                pw_api.NodeFeature.INFO,
+                pw_api.NodeFeature.PING,
+            )
+        )
+        await test_switch.update_node_details(
+            firmware=dt(2011, 6, 27, 9, 4, 10, tzinfo=UTC),
+            hardware="070051",
+            node_type=None,
+            timestamp=None,
+            relay_state=None,
+            logaddress_pointer=None,
+        )
+        assert await test_switch.load()
+
+        # Switch specific defaults
+        assert test_switch.switch is False
+
+        # switch with cache enabled
+        test_switch = pw_switch.PlugwiseSwitch(
+            "1298347650AFBECD", 1, mock_stick_controller, load_callback
+        )
+        await test_switch.update_node_details(
+            firmware=dt(2011, 6, 27, 9, 4, 10, tzinfo=UTC),
+            hardware="070051",
+            node_type=None,
+            timestamp=None,
+            relay_state=None,
+            logaddress_pointer=None,
+        )
+        test_switch.cache_enabled = True
+        assert test_switch.cache_enabled is True
+        assert await test_switch.load()
+        assert sorted(test_switch.features) == sorted(
+            (
+                pw_api.NodeFeature.AVAILABLE,
+                pw_api.NodeFeature.BATTERY,
+                pw_api.NodeFeature.INFO,
+                pw_api.NodeFeature.PING,
+                pw_api.NodeFeature.SWITCH,
+            )
+        )
+
+        state = await test_switch.get_state(
+            (
+                pw_api.NodeFeature.AVAILABLE,
+                pw_api.NodeFeature.BATTERY,
+                pw_api.NodeFeature.INFO,
+                pw_api.NodeFeature.SWITCH,
+            )
+        )
+        assert not state[pw_api.NodeFeature.AVAILABLE].state
+
+    @pytest.mark.asyncio
+    async def test_node_discovery_and_load(
+        self, monkeypatch: pytest.MonkeyPatch
+    ) -> None:
+        """Testing discovery of nodes."""
+        mock_serial = MockSerial(None)
+        monkeypatch.setattr(
+            pw_connection_manager,
+            "create_serial_connection",
+            mock_serial.mock_connection,
+        )
+        monkeypatch.setattr(pw_sender, "STICK_TIME_OUT", 0.2)
+        monkeypatch.setattr(pw_requests, "NODE_TIME_OUT", 2.0)
+        monkeypatch.setattr(pw_helpers_cache, "ospath", MockOsPath())
+        monkeypatch.setattr(pw_helpers_cache, "os_name", "nt")
+        monkeypatch.setattr(pw_helpers_cache, "os_getenv", self.fake_env)
+        monkeypatch.setattr(pw_helpers_cache, "os_path_expand_user", self.fake_env)
+        monkeypatch.setattr(pw_helpers_cache, "os_path_join", self.os_path_join)
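+        # Empty mocked cache-file content for the aiofiles patch below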
"os_path_join", self.os_path_join) + mock_read_data = [""] + file_chunks_iter = iter(mock_read_data) + mock_file_stream = MagicMock(readlines=lambda *args, **kwargs: file_chunks_iter) + + stick = pw_stick.Stick("test_port", cache_enabled=True) + await stick.connect() + with patch("aiofiles.threadpool.sync_open", return_value=mock_file_stream): + await stick.initialize() + await stick.discover_nodes(load=True) + + assert len(stick.nodes) == 6 + + assert stick.nodes["0098765432101234"].is_loaded + assert stick.nodes["0098765432101234"].name == "Circle + 01234" + assert stick.nodes["0098765432101234"].node_info.firmware == dt( + 2011, 6, 27, 8, 47, 37, tzinfo=UTC + ) + assert stick.nodes["0098765432101234"].node_info.version == "070073" + assert stick.nodes["0098765432101234"].node_info.model == "Circle +" + assert stick.nodes["0098765432101234"].node_info.model_type == "type F" + assert stick.nodes["0098765432101234"].node_info.name == "Circle + 01234" + assert stick.nodes["0098765432101234"].available + assert not stick.nodes["0098765432101234"].node_info.is_battery_powered + assert not stick.nodes["0098765432101234"].is_battery_powered + assert stick.nodes["0098765432101234"].network_address == -1 + assert stick.nodes["0098765432101234"].cache_folder == "" + assert not stick.nodes["0098765432101234"].cache_folder_create + assert stick.nodes["0098765432101234"].cache_enabled + + # Check an unsupported state feature raises an error + with pytest.raises(pw_exceptions.NodeError): + await stick.nodes["0098765432101234"].get_state( + (pw_api.NodeFeature.MOTION,) + ) + + # Get state + get_state_timestamp = dt.now(UTC).replace(minute=0, second=0, microsecond=0) + state = await stick.nodes["0098765432101234"].get_state( + ( + pw_api.NodeFeature.AVAILABLE, + pw_api.NodeFeature.PING, + pw_api.NodeFeature.INFO, + pw_api.NodeFeature.RELAY, + ) + ) + + # Check Available + assert state[pw_api.NodeFeature.AVAILABLE].state + assert ( + state[pw_api.NodeFeature.AVAILABLE].last_seen.replace( + minute=0, second=0, microsecond=0 + ) + == get_state_timestamp + ) + + # Check Ping + assert state[pw_api.NodeFeature.PING].rssi_in == 69 + assert state[pw_api.NodeFeature.PING].rssi_out == 70 + assert state[pw_api.NodeFeature.PING].rtt == 1074 + assert ( + state[pw_api.NodeFeature.PING].timestamp.replace( + minute=0, second=0, microsecond=0 + ) + == get_state_timestamp + ) + assert stick.nodes["0098765432101234"].ping_stats.rssi_in == 69 + assert stick.nodes["0098765432101234"].ping_stats.rssi_out == 70 + assert stick.nodes["0098765432101234"].ping_stats.rtt == 1074 + assert ( + stick.nodes["0098765432101234"].ping_stats.timestamp.replace( + minute=0, second=0, microsecond=0 + ) + == get_state_timestamp + ) + + # Check INFO + assert state[pw_api.NodeFeature.INFO].mac == "0098765432101234" + assert state[pw_api.NodeFeature.INFO].zigbee_address == -1 + assert not state[pw_api.NodeFeature.INFO].is_battery_powered + assert sorted(state[pw_api.NodeFeature.INFO].features) == sorted( + ( + pw_api.NodeFeature.AVAILABLE, + pw_api.NodeFeature.INFO, + pw_api.NodeFeature.PING, + pw_api.NodeFeature.RELAY, + pw_api.NodeFeature.ENERGY, + pw_api.NodeFeature.POWER, + ) + ) + assert state[pw_api.NodeFeature.INFO].firmware == dt( + 2011, 6, 27, 8, 47, 37, tzinfo=UTC + ) + assert state[pw_api.NodeFeature.INFO].name == "Circle + 01234" + assert state[pw_api.NodeFeature.INFO].model == "Circle +" + assert state[pw_api.NodeFeature.INFO].model_type == "type F" + assert state[pw_api.NodeFeature.INFO].node_type == pw_api.NodeType.CIRCLE_PLUS + 
+        assert (
+            state[pw_api.NodeFeature.INFO].timestamp.replace(
+                minute=0, second=0, microsecond=0
+            )
+            == get_state_timestamp
+        )
+        assert state[pw_api.NodeFeature.INFO].version == "070073"
+
+        assert state[pw_api.NodeFeature.RELAY].state
+
+        # Check 1111111111111111
+        get_state_timestamp = dt.now(UTC).replace(minute=0, second=0, microsecond=0)
+        state = await stick.nodes["1111111111111111"].get_state(
+            (pw_api.NodeFeature.PING, pw_api.NodeFeature.INFO, pw_api.NodeFeature.RELAY)
+        )
+
+        assert state[pw_api.NodeFeature.INFO].mac == "1111111111111111"
+        assert state[pw_api.NodeFeature.INFO].zigbee_address == 0
+        assert not state[pw_api.NodeFeature.INFO].is_battery_powered
+        assert state[pw_api.NodeFeature.INFO].version == "070140"
+        assert state[pw_api.NodeFeature.INFO].node_type == pw_api.NodeType.CIRCLE
+        assert (
+            state[pw_api.NodeFeature.INFO].timestamp.replace(
+                minute=0, second=0, microsecond=0
+            )
+            == get_state_timestamp
+        )
+        assert sorted(state[pw_api.NodeFeature.INFO].features) == sorted(
+            (
+                pw_api.NodeFeature.AVAILABLE,
+                pw_api.NodeFeature.INFO,
+                pw_api.NodeFeature.PING,
+                pw_api.NodeFeature.RELAY,
+                pw_api.NodeFeature.ENERGY,
+                pw_api.NodeFeature.POWER,
+            )
+        )
+        assert state[pw_api.NodeFeature.AVAILABLE].state
+        assert state[pw_api.NodeFeature.RELAY].state
+
+        # region Scan
+        self.test_node_awake = asyncio.Future()
+        unsub_awake = stick.subscribe_to_node_events(
+            node_event_callback=self.node_awake,
+            events=(pw_api.NodeEvent.AWAKE,),
+        )
+        mock_serial.inject_message(b"004F555555555555555500", b"FFFE")
+        assert await self.test_node_awake
+        unsub_awake()
+
+        assert stick.nodes["5555555555555555"].node_info.firmware == dt(
+            2011, 6, 27, 8, 55, 44, tzinfo=UTC
+        )
+        assert stick.nodes["5555555555555555"].node_info.version == "080007"
+        assert stick.nodes["5555555555555555"].node_info.model == "Scan"
+        assert stick.nodes["5555555555555555"].node_info.model_type is None
+        assert stick.nodes["5555555555555555"].available
+        assert stick.nodes["5555555555555555"].node_info.is_battery_powered
+        assert sorted(stick.nodes["5555555555555555"].features) == sorted(
+            (
+                pw_api.NodeFeature.AVAILABLE,
+                pw_api.NodeFeature.BATTERY,
+                pw_api.NodeFeature.INFO,
+                pw_api.NodeFeature.PING,
+                pw_api.NodeFeature.MOTION,
+                pw_api.NodeFeature.MOTION_CONFIG,
+            )
+        )
+        state = await stick.nodes["5555555555555555"].get_state(
+            (
+                pw_api.NodeFeature.AVAILABLE,
+                pw_api.NodeFeature.BATTERY,
+                pw_api.NodeFeature.INFO,
+                pw_api.NodeFeature.MOTION,
+                pw_api.NodeFeature.MOTION_CONFIG,
+            )
+        )
+        assert state[pw_api.NodeFeature.AVAILABLE].state
+        assert state[pw_api.NodeFeature.BATTERY].maintenance_interval == 60
+        assert state[pw_api.NodeFeature.BATTERY].awake_duration == 10
+        assert not state[pw_api.NodeFeature.BATTERY].clock_sync
+        assert state[pw_api.NodeFeature.BATTERY].clock_interval == 25200
+        assert state[pw_api.NodeFeature.BATTERY].sleep_duration == 60
+
+        # Motion
+        self.test_motion_on = asyncio.Future()
+        self.test_motion_off = asyncio.Future()
+        unsub_motion = stick.nodes["5555555555555555"].subscribe_to_feature_update(
+            node_feature_callback=self.node_motion_state,
+            features=(pw_api.NodeFeature.MOTION,),
+        )
+        # Inject motion message to trigger a 'motion on' event
+        mock_serial.inject_message(b"005655555555555555550001", b"FFFF")
+        motion_on = await self.test_motion_on
+        assert motion_on
+        assert stick.nodes["5555555555555555"].motion
+
+        # Inject motion message to trigger a 'motion off' event
+        mock_serial.inject_message(b"005655555555555555550000", b"FFFF")
+        motion_off = await self.test_motion_off
+        assert not motion_off
+        assert not stick.nodes["5555555555555555"].motion
+        unsub_motion()
+        # endregion
+
+        # region Switch
+        self.test_node_loaded = asyncio.Future()
+        unsub_loaded = stick.subscribe_to_node_events(
+            node_event_callback=self.node_loaded,
+            events=(pw_api.NodeEvent.LOADED,),
+        )
+        mock_serial.inject_message(b"004F888888888888888800", b"FFFE")
+        assert await self.test_node_loaded
+        unsub_loaded()
+
+        assert stick.nodes["8888888888888888"].node_info.firmware == dt(
+            2011, 6, 27, 9, 4, 10, tzinfo=UTC
+        )
+        assert stick.nodes["8888888888888888"].node_info.version == "070051"
+        assert stick.nodes["8888888888888888"].node_info.model == "Switch"
+        assert stick.nodes["8888888888888888"].node_info.model_type is None
+        assert stick.nodes["8888888888888888"].available
+        assert stick.nodes["8888888888888888"].node_info.is_battery_powered
+        assert sorted(stick.nodes["8888888888888888"].features) == sorted(
+            (
+                pw_api.NodeFeature.AVAILABLE,
+                pw_api.NodeFeature.BATTERY,
+                pw_api.NodeFeature.INFO,
+                pw_api.NodeFeature.PING,
+                pw_api.NodeFeature.SWITCH,
+            )
+        )
+        state = await stick.nodes["8888888888888888"].get_state(
+            (
+                pw_api.NodeFeature.AVAILABLE,
+                pw_api.NodeFeature.BATTERY,
+                pw_api.NodeFeature.INFO,
+                pw_api.NodeFeature.SWITCH,
+            )
+        )
+        # endregion
+        # test disable cache
+        assert stick.cache_enabled
+        stick.cache_enabled = False
+        assert not stick.cache_enabled
-# No tests available
-class TestPlugwise:  # pylint: disable=attribute-defined-outside-init
-    """Tests for Plugwise USB."""
+        # test changing cache_folder
+        assert stick.cache_folder == ""
+        stick.cache_folder = "mock_folder_that_exists"
+        assert stick.cache_folder == "mock_folder_that_exists"
-    async def test_connect_legacy_anna(self):
-        """No tests available."""
-        assert True
+        with patch("aiofiles.threadpool.sync_open", return_value=mock_file_stream):
+            await stick.disconnect()
+            await asyncio.sleep(1)