86 changes: 24 additions & 62 deletions plugwise_usb/nodes/circle.py
@@ -402,6 +402,13 @@ async def energy_update(self) -> EnergyStatistics | None: # noqa: PLR0911 PLR09
)
return None

if self._cache_enabled:
_LOGGER.debug(
"Saving initial energy_logs to cache for %s", self._mac_in_str
)
await self._energy_log_records_save_to_cache()
await self.save_cache()

if (
missing_addresses := self._energy_counters.log_addresses_missing
) is not None:
@@ -515,7 +522,11 @@ async def _get_initial_energy_logs(self) -> None:
total_addresses -= 1

if self._cache_enabled:
_LOGGER.debug(
"Saving initial energy_logs to cache for %s", self._mac_in_str
)
await self._energy_log_records_save_to_cache()
await self.save_cache()

async def get_missing_energy_logs(self) -> None:
"""Task to retrieve missing energy logs."""
@@ -543,7 +554,11 @@ async def get_missing_energy_logs(self) -> None:
await task

if self._cache_enabled:
_LOGGER.debug(
"Saving missing energy_logs to cache for %s", self._mac_in_str
)
await self._energy_log_records_save_to_cache()
await self.save_cache()

async def energy_log_update(self, address: int | None) -> tuple[bool, bool]:
"""Request energy log statistics from node. Returns true if successful."""
@@ -568,7 +583,6 @@ async def energy_log_update(self, address: int | None) -> tuple[bool, bool]:

_LOGGER.debug("EnergyLogs data from %s, address=%s", self._mac_in_str, address)
await self._available_update_state(True, response.timestamp)
energy_record_update = False

# Forward historical energy log information to energy counters
# Each response message contains 4 log counters (slots) of the
@@ -582,14 +596,14 @@ async def energy_log_update(self, address: int | None) -> tuple[bool, bool]:
if log_timestamp is None or log_pulses is None:
self._energy_counters.add_empty_log(response.log_address, _slot)
empty_log = True
elif await self._energy_log_record_update_state(
response.log_address,
_slot,
log_timestamp.replace(tzinfo=UTC),
log_pulses,
import_only=True,
):
energy_record_update = True
else:
self._energy_counters.add_pulse_log(
response.log_address,
_slot,
log_timestamp.replace(tzinfo=UTC),
log_pulses,
import_only=True,
)
Comment on lines +599 to +606
💡 Verification agent

🧩 Analysis chain

Regression risk: removing incremental cache save leaves single-missing-address path unsaved

Now that cache persistence is centralized outside energy_log_update, the energy_update branch that handles exactly one missing address returns early (after calling energy_log_update) without scheduling the background task and without persisting. If the process restarts before another persistence point (rollover, bulk retrieval, unload), those newly imported logs won’t be on disk.

Fix: persist before returning in the single-missing-address branch of energy_update.
Outside the selected range, adjust around Lines 423-433:

             if len(missing_addresses) == 1:
                 result, _ = await self.energy_log_update(missing_addresses[0])
                 if result:
+                    if self._cache_enabled:
+                        # Persist immediately when resolving a single missing address
+                        await self._energy_log_records_save_to_cache()
+                        await self.save_cache()
                     await self.power_update()
                     _LOGGER.debug(
                         "async_energy_update for %s | single energy log is missing | %s",
                         self._mac_in_str,
                         missing_addresses,
                     )
                     return self._energy_counters.energy_statistics

Verification script: ensure energy_log_update isn’t called from other places that now need explicit persistence.


🏁 Script executed:

#!/bin/bash
# Find all call sites of energy_log_update and show brief context
rg -n -A 3 -B 3 $'energy_log_update\\('

# Optional: ensure single-missing-address branch exists as expected
rg -n -A 8 -B 8 $'len\\(missing_addresses\\) == 1' plugwise_usb/nodes/circle.py

Length of output: 4690


Add immediate cache persistence for the single-missing-address branch

In the async_energy_update method, when exactly one missing address is handled, we call energy_log_update and then return without persisting the cache. If the process exits before the next bulk save (rollover, multi-fetch, or unload), those newly imported logs will be lost.

• File: plugwise_usb/nodes/circle.py
• Location: around line 423 in async_energy_update

Suggested diff:

             if len(missing_addresses) == 1:
-                result, _ = await self.energy_log_update(missing_addresses[0])
-                if result:
-                    await self.power_update()
-                    _LOGGER.debug(
-                        "async_energy_update for %s | single energy log is missing | %s",
-                        self._mac_in_str,
-                        missing_addresses,
-                    )
-                return self._energy_counters.energy_statistics
+                result, _ = await self.energy_log_update(missing_addresses[0])
+                if result:
+                    # Persist immediately when resolving a single missing address
+                    if self._cache_enabled:
+                        await self._energy_log_records_save_to_cache()
+                        await self.save_cache()
+                    await self.power_update()
+                    _LOGGER.debug(
+                        "async_energy_update for %s | single energy log is missing | %s",
+                        self._mac_in_str,
+                        missing_addresses,
+                    )
+                return self._energy_counters.energy_statistics

This ensures that as soon as a solitary missing log is fetched, it’s written to disk.

Committable suggestion skipped: line range outside the PR's diff.

🤖 Prompt for AI Agents
In plugwise_usb/nodes/circle.py around line 423 in the async_energy_update
method, after calling energy_log_update for the single missing address case, add
code to immediately persist the cache to disk. This prevents loss of newly
imported logs if the process exits before the next bulk save. Locate the branch
handling exactly one missing address, call the cache persistence method right
after energy_log_update, and before returning.
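
A side note on the resulting call sites: after this PR, the same persistence sequence (check self._cache_enabled, await self._energy_log_records_save_to_cache(), await self.save_cache()) is repeated in energy_update, _get_initial_energy_logs, get_missing_energy_logs, and unload. A small helper would make the fix above a one-liner and remove the risk of forgetting the save_cache() step. This is a minimal sketch, not part of the PR; the name _persist_energy_cache and its reason argument are hypothetical:

    # Hypothetical helper, not in this PR: centralizes the save-to-cache
    # sequence that the diff currently repeats at four call sites.
    async def _persist_energy_cache(self, reason: str) -> None:
        """Write the collected energy log records to the on-disk cache."""
        if not self._cache_enabled:
            return
        _LOGGER.debug(
            "Saving energy_logs to cache for %s (%s)", self._mac_in_str, reason
        )
        await self._energy_log_records_save_to_cache()
        await self.save_cache()

Each call site would then reduce to: await self._persist_energy_cache("initial energy logs").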

if not last_energy_timestamp_collected:
# Collect the timestamp of the most recent response
self._last_collected_energy_timestamp = log_timestamp.replace(
@@ -603,12 +617,6 @@ async def energy_log_update(self, address: int | None) -> tuple[bool, bool]:

result = True
self._energy_counters.update()
if energy_record_update:
_LOGGER.debug(
"Saving energy record update to cache for %s", self._mac_in_str
)
await self.save_cache()

return result, empty_log

async def _energy_log_records_load_from_cache(self) -> bool:
@@ -688,55 +696,8 @@ async def _energy_log_records_save_to_cache(self) -> None:
cached_logs += f"-{log.timestamp.hour}-{log.timestamp.minute}"
cached_logs += f"-{log.timestamp.second}:{log.pulses}"

_LOGGER.debug("Saving energy logrecords to cache for %s", self._mac_in_str)
self._set_cache(CACHE_ENERGY_COLLECTION, cached_logs)

async def _energy_log_record_update_state(
self,
address: int,
slot: int,
timestamp: datetime,
pulses: int,
import_only: bool = False,
) -> bool:
"""Process new energy log record. Returns true if record is new or changed."""
self._energy_counters.add_pulse_log(
address, slot, timestamp, pulses, import_only=import_only
)
if not self._cache_enabled:
return False

log_cache_record = f"{address}:{slot}:{timestamp.year}"
log_cache_record += f"-{timestamp.month}-{timestamp.day}"
log_cache_record += f"-{timestamp.hour}-{timestamp.minute}"
log_cache_record += f"-{timestamp.second}:{pulses}"
if (cached_logs := self._get_cache(CACHE_ENERGY_COLLECTION)) is not None:
if log_cache_record not in cached_logs:
_LOGGER.debug(
"Adding logrecord (%s, %s) to cache of %s",
str(address),
str(slot),
self._mac_in_str,
)
self._set_cache(
CACHE_ENERGY_COLLECTION, cached_logs + "|" + log_cache_record
)
return True

_LOGGER.debug(
"Energy logrecord already present for %s, ignoring", self._mac_in_str
)
return False

_LOGGER.debug(
"Cache is empty, adding new logrecord (%s, %s) for %s",
str(address),
str(slot),
self._mac_in_str,
)
self._set_cache(CACHE_ENERGY_COLLECTION, log_cache_record)
return True

@raise_not_loaded
async def set_relay(self, state: bool) -> bool:
"""Change the state of the relay."""
@@ -1081,6 +1042,7 @@ async def unload(self) -> None:

if self._cache_enabled:
await self._energy_log_records_save_to_cache()
await self.save_cache()

await super().unload()
