Added documentation to EventHandler and SmartObject

This commit is contained in:
2026-04-17 19:24:23 +02:00
parent abdc962500
commit 7dd39e0ea9
2 changed files with 274 additions and 72 deletions

View File

@@ -1,6 +1,53 @@
import appdaemon.plugins.hass.hassapi as hass
import pickle
import os
# =============================================================================
# EventDispatcher / EventHandler — HA event subscription helpers
# =============================================================================
#
# EventDispatcher
# ---------------
# Subscribes to a single HA event and invokes a callback when the event fires
# and its payload matches the configured filter.
#
# event_name : HA event name to listen for.
# event_data : dict of key/value pairs that must all match the event
# payload (deep partial match — nested dicts are matched
# recursively). None means "match any payload".
# reset_data : optional dict. When set, the dispatcher becomes a
# one-shot latch: the callback fires once on event_data
# match, then waits for a reset_data match before it can
# fire again. Useful for press/release pairs.
# event_context: arbitrary value forwarded as the third argument to the
# callback — use it to identify which dispatcher fired.
#
# Callback signature: callback(event_name, event_data, event_context)
#
# EventHandler
# ------------
# Convenience wrapper that creates one EventDispatcher per entry in an
# events_block dict (the YAML "events_to_listen" structure).
#
# YAML events_block format:
# events_to_listen:
# <key>:
# event_name: <ha_event_name> # required
# event_data: # optional — payload filter
# <field>: <value>
# reset_data: # optional — latch reset filter
# <field>: <value>
#
# Usage:
# handler = EventHandler(ad_api, self.args['events_to_listen'], my_callback)
# # Keep a reference to handler — it owns the subscriptions.
#
# Or construct the events_block programmatically:
# handler = EventHandler(
# ad_api,
# {'evt': {'event_name': 'MY_EVENT', 'event_data': {'key': 'value'}}},
# my_callback,
# event_context='optional_context'
# )
# =============================================================================
class EventDispatcher:
def __init__(self,ad_api,event_name,callback,event_data,reset_data,event_context):
@@ -69,11 +116,8 @@ class EventHandler:
self.__ad_api = ad_api
self.event_dispatchers = []
#try:
for event_block in events_block.values():
register_event_with_params(event_block,callback,event_context)
#except (AttributeError):
# self.log(f"Format not supported : {events_block}")
# Forward a log line to the owning AppDaemon API object so the app's
# standard logging configuration applies.
def log(self, message, **kwargs):
    api = self.__ad_api
    api.log(message, **kwargs)
@@ -82,18 +126,3 @@ class EventHandler:
self.log(f'Registering dispatcher {callback.__name__} for event "{event_name}" ({event_data})')
dispatcher = EventDispatcher(self.__ad_api,event_name,callback,event_data,reset_data,event_context)
self.event_dispatchers.append(dispatcher)
# if not event_name in self.event_dispatchers:
# self.event_dispatchers[event_name] = list()
# self.log(f"listening event {event_name}")
# self.__ad_api.listen_event(self._on_event_internal,event_name)
# self.event_dispatchers[event_name].append(dispatcher)
# def _on_event_internal(self, event_name, data, kwargs):
# if event_name in self.event_dispatchers:
# for dispatcher in self.event_dispatchers[event_name]:
# if dispatcher.process_event(data) == True:
# break
# else:
# self.log(f"{event_name} has no dispatcher registered",level="WARNING")

View File

@@ -7,54 +7,113 @@ from virtualsensors import VirtualSensors
from expressionparser import ParsingException
from logger_interface import LoggerInterface
# =============================================================================
# SmartObject — Base class for all ad_toolbox AppDaemon apps
# =============================================================================
# Inherit from SmartObject instead of hass.Hass to get all features below.
#
# FEATURES
# --------
# - Entity linking : bind the app to a single HA entity (self.entity)
# - Dataset persistence : self.dataset is auto-loaded/saved to disk (pickle)
# - Constants : key/value dict injected into expression contexts
# - Templates library : shared named-template provider from another app
# - Virtual sensors : declarative derived sensors defined in YAML
# - Attributes override : map HA sensor states onto entity attributes
# - MQTT discovery : entities created via create_entity() are auto-
# published as HA MQTT discovery sensors
#
# YAML CONFIGURATION
# ------------------
# All keys are optional unless noted.
#
# entity: <entity_id>
# HA entity this app is linked to (e.g. "sensor.my_sensor").
# Available as self.entity_id and self.entity after initialize().
# Also injected into the expression context as the constant "self".
#
# default_entity_state: <state_string>
# Initial state to set when entity does not yet exist in HA.
# If omitted and the entity is missing, get_default_entity_state() is
# called (raises an error by default — override it in subclasses).
#
# trace_events: <ha_event_name | list of ha_event_names>
# Log every occurrence of the named HA event(s) to the app log.
# Useful during development to inspect event payloads.
# Example: trace_events: MY_BUTTON_EVENT
# Example: trace_events: [MY_BUTTON_EVENT, ios.action_fired]
#
# mute: true
# Suppress all log output from this app instance.
#
# constants:
# <key>: <value>
# ...
# Arbitrary key/value pairs injected into expression and template
# contexts. "self" is always added automatically as the entity_id.
#
# templates_library: <app_name>
# Name of another SmartObject app that exposes a template library.
#
# virtual_sensors:
# <sensor_id>:
# ...
# Declarative virtual sensor definitions passed to VirtualSensors.
# See virtualsensors.py for the full sub-schema.
#
# attributes_override:
# <attribute_name>: <entity_id | static_value>
# ...
# Override attributes on self.entity. Each value can be either:
# - A HA entity_id → attribute tracks that entity's state live.
# - A static value → attribute is set once at startup.
#
# mqtt_device_name: <friendly name>
# Human-readable device name used in HA MQTT discovery payloads.
# Defaults to the AppDaemon app name (self.name) if omitted.
# Requires the MQTT integration to be available in HA.
# Used by the MQTT discovery feature (see create_entity()).
# When you create a new entity with create_entity(), it is attached through
# MQTT discovery to a device with this name and appears in HA as a child of
# that device.
# Since MQTT retains these entities indefinitely, you may need to clean up
# stale entities with MQTT Explorer if you rename or delete entities in
# your app.
#
# DATASET PERSISTENCE
# -------------------
# Subclasses can store arbitrary data in self.dataset (any pickle-able
# Python object). It is automatically:
# - Loaded from apps/data/<appname>.dataset at startup (if the file exists)
# - Saved to apps/data/<appname>.dataset on terminate (if not None)
#
# SUBCLASSING
# -----------
# Override on_initialize_smart_object() instead of initialize().
# Override get_default_entity_state() to provide a default state when the
# linked entity does not exist and no default_entity_state arg is given.
# =============================================================================
class SmartObject(hass.Hass,LoggerInterface):
def _sanitize_for_topic(self, value):
return re.sub(r"[^a-zA-Z0-9_]+", "_", str(value)).strip("_").lower()
# Lazily initialise MQTT discovery support (invoked from create_entity()).
# Returns True when the HA mqtt/publish service is available and self._mqtt
# holds the device metadata, False otherwise. The outcome is cached:
# self._mqtt is None when MQTT was probed and found unavailable.
def _mqtt_lazy_init(self):
    # A previous call already decided — reuse the cached result.
    if hasattr(self, '_mqtt'):
        return self._mqtt is not None
    self._mqtt = None
    try:
        # Probe HA's service registry for the mqtt.publish service.
        available = any(
            i.get('domain') == 'mqtt' and i.get('service') == 'publish'
            for i in self.list_services('global')
        )
    except Exception:
        # Best effort: any failure while probing counts as "MQTT absent".
        available = False
    if not available:
        return False
    # Device metadata reused in every MQTT discovery payload.
    node_id = self._sanitize_for_topic(self.name)
    device_name = self.args.get('mqtt_device_name', self.name)
    self._mqtt = {
        'node_id': node_id,
        'device': {
            'identifiers': [node_id],
            'name': device_name,
            'manufacturer': 'AppDaemon',
            'model': self.__class__.__name__,
        },
        'entities': set(),   # entity_ids registered via create_entity()
        'handles': {},       # listen_state handles, cancelled on terminate
    }
    return True
# Serialize non-string payloads to JSON, then publish on *topic* via HA's
# mqtt/publish service. *retain* is forwarded to the broker.
def _mqtt_publish(self, topic, payload, retain=False):
    if not isinstance(payload, str):
        payload = json.dumps(payload)
    self.call_service('mqtt/publish', topic=topic, payload=payload, retain=retain)
# listen_state callback: republish the full state + attributes of
# entity_id to its MQTT state topic whenever its HA state changes.
# No-op when MQTT is unavailable or the entity was not registered
# through create_entity().
def _mqtt_sync_state(self, entity_id, attribute, old, new, kwargs):
    if not getattr(self, '_mqtt', None) or entity_id not in self._mqtt['entities']:
        return
    # Snapshot of state + attributes; an attribute named 'state' would
    # deliberately override the 'state' key (update() runs last).
    state_data = self.get_state(entity_id, attribute='all') or {}
    payload = {'state': state_data.get('state')}
    payload.update(state_data.get('attributes', {}))
    node_id = self._mqtt['node_id']
    obj = self._sanitize_for_topic(entity_id.split('.')[-1])
    # retain=True so HA picks the state back up after a broker restart.
    self._mqtt_publish(f"appdaemon/{node_id}/{obj}/state", payload, retain=True)
# ------------------------------------------------------------------
# Public API
# ------------------------------------------------------------------
# Create (or update) a HA entity and, when MQTT is available, register
# it with HA via MQTT discovery so it appears as a proper device entity.
#
# Parameters
# ----------
# entity_id : full HA entity id, e.g. "sensor.my_value"
# state : initial state string (optional)
# attributes : dict of initial attributes (optional)
# name : friendly name shown in HA UI
# icon : MDI icon string, e.g. "mdi:thermometer"
# unit_of_measurement : e.g. "°C", "%", "W"
# device_class : HA sensor device class, e.g. "temperature"
# state_class : "measurement", "total", "total_increasing"
#
# The entity's live state is kept in sync with MQTT automatically via
# a listen_state subscription set up at registration time.
# Returns the AppDaemon entity handle.
def create_entity(self, entity_id, state=None, attributes=None, name=None, icon=None, unit_of_measurement=None, device_class=None, state_class=None):
entity = self.get_entity(entity_id, check_existence=False)
if self._mqtt_lazy_init() and entity_id not in self._mqtt['entities']:
@@ -90,6 +149,83 @@ class SmartObject(hass.Hass,LoggerInterface):
self.set_state(entity_id, state='unknown')
return entity
# Override this in a subclass to return a default state string when the
# linked entity does not exist and no "default_entity_state" arg was set.
# The base implementation logs a fatal error (second argument True stops
# the app).
def get_default_entity_state(self):
    # Typo fix in the user-facing message: "wan't" -> "want".
    self.log_error(f"{self.entity_id} doesn't exist. Please use default_entity_state if you want to create one or override get_default_entity_state()", True)
# Logs any HA event registered via trace_events with its full payload.
def _on_trace_event(self, event_name, data, kwargs):
self.log_info(f"[trace] event '{event_name}' fired. data = {data}")
# Subclass extension point — override this instead of initialize().
# Invoked at the end of initialize(), once every YAML arg is processed.
def on_initialize_smart_object(self):
    """Default implementation: intentionally does nothing."""
# ------------------------------------------------------------------
# MQTT Discovery (internal)
# ------------------------------------------------------------------
# Called lazily on the first create_entity() call. Checks whether the
# MQTT publish service is available and, if so, initialises self._mqtt
# with the device metadata used in discovery payloads.
# Returns True when MQTT is ready, False otherwise (MQTT not installed
# or the service is not available).
def _mqtt_lazy_init(self):
if hasattr(self, '_mqtt'):
return self._mqtt is not None
self._mqtt = None
try:
available = any(
i.get('domain') == 'mqtt' and i.get('service') == 'publish'
for i in self.list_services('global')
)
except Exception:
available = False
if not available:
return False
node_id = self._sanitize_for_topic(self.name)
device_name = self.args.get('mqtt_device_name', self.name)
self._mqtt = {
'node_id': node_id,
'device': {
'identifiers': [node_id],
'name': device_name,
'manufacturer': 'AppDaemon',
'model': self.__class__.__name__,
},
'entities': set(),
'handles': {},
}
return True
# Normalise *value* into an MQTT-topic-safe token: runs of characters
# outside [a-zA-Z0-9_] become "_", leading/trailing "_" are stripped,
# and the result is lower-cased.
def _sanitize_for_topic(self, value):
    return re.sub(r"[^a-zA-Z0-9_]+", "_", str(value)).strip("_").lower()
# Serialize payload to JSON if it is not already a string, then call
# the mqtt/publish service. retain=True is used for state and config
# topics so HA picks them up immediately after a broker restart.
def _mqtt_publish(self, topic, payload, retain=False):
if not isinstance(payload, str):
payload = json.dumps(payload)
self.call_service('mqtt/publish', topic=topic, payload=payload, retain=retain)
# listen_state callback — re-publishes the full state + attributes of
# entity_id to its MQTT state topic whenever the HA state changes.
def _mqtt_sync_state(self, entity_id, attribute, old, new, kwargs):
if not getattr(self, '_mqtt', None) or entity_id not in self._mqtt['entities']:
return
state_data = self.get_state(entity_id, attribute='all') or {}
payload = {'state': state_data.get('state')}
payload.update(state_data.get('attributes', {}))
node_id = self._mqtt['node_id']
obj = self._sanitize_for_topic(entity_id.split('.')[-1])
self._mqtt_publish(f"appdaemon/{node_id}/{obj}/state", payload, retain=True)
# Called from terminate(). Publishes "offline" availability for every
# registered entity so HA marks them as unavailable, then cancels all
# listen_state subscriptions created during create_entity().
def _mqtt_terminate(self):
if not getattr(self, '_mqtt', None):
return
@@ -102,30 +238,60 @@ class SmartObject(hass.Hass,LoggerInterface):
try: self.cancel_listen_state(handle, silent=True)
except Exception: pass
# ------------------------------------------------------------------
# Dataset persistence helpers
# ------------------------------------------------------------------
# Location of the shared dataset directory.
def get_dataset_folder(self):
    """Return the shared data folder path: <app_dir>/data."""
    app_dir = str(self.AD.app_dir)
    return os.path.join(app_dir, "data")
# Returns the full path of this app's dataset file:
# <app_dir>/data/<appname>.dataset
def get_dataset_name(self):
    """Return this app's dataset file path: <data folder>/<appname>.dataset."""
    folder = self.get_dataset_folder()
    return os.path.join(folder, f"{self.name}.dataset")
# ------------------------------------------------------------------
# Constants
# ------------------------------------------------------------------
# Merge constants_dict into self.constants. Constants are available
# inside expression / template evaluation contexts.
def add_constants(self, constants_dict):
    """Merge *constants_dict* into self.constants.

    Constants are injected into expression / template evaluation
    contexts; later declarations overwrite earlier ones.
    """
    self.log_info(f"Declaring constants {constants_dict}")
    self.constants.update(constants_dict)
# Base implementation: fatal error — subclasses should override this to
# supply a default state when the linked entity does not exist.
def get_default_entity_state(self):
    # Typo fix in the user-facing message: "wan't" -> "want".
    self.log_error(f"{self.entity_id} doesn't exist. Please use default_entity_state if you want to create one or override get_default_entity_state()", True)
# ------------------------------------------------------------------
# AppDaemon lifecycle
# ------------------------------------------------------------------
# Main AppDaemon entry point. Processes all YAML args in order:
# mute → entity → constants → templates_library → virtual_sensors
# → attributes_override → dataset restore → on_initialize_smart_object()
def initialize(self):
try:
#self.depends_on_module(["smartobject","virtualsensors","logger_interface"])
self.initialize_logger_interface(self.get_ad_api())
self.log_info(f"Name = {self.name}")
# mute: true → suppress all log output for this app
try: self.mute_logger(self.args['mute'])
except KeyError: pass
self.event_dispatchers = dict()
self.constants = dict()
self.dataset = None
#retrieve object param
# trace_events: <event | list> → log event payloads for debugging
if 'trace_events' in self.args:
event_names = self.args['trace_events']
if not isinstance(event_names, list):
event_names = [event_names]
for event_name in event_names:
self.log_info(f"Tracing event '{event_name}'")
self.listen_event(self._on_trace_event, event_name)
# entity: <entity_id> → link app to a HA entity
if "entity" in self.args:
self.entity_id = self.args["entity"]
self.entity = self.get_entity(self.entity_id)
@@ -142,9 +308,11 @@ class SmartObject(hass.Hass,LoggerInterface):
self.entity_id = None
self.entity = None
# constants: {key: value, ...} → add to expression context
if 'constants' in self.args:
self.add_constants(self.args['constants'])
# templates_library: <app_name> → load shared template provider
self.templates_library = None
if 'templates_library' in self.args:
library_name = self.args['templates_library']
@@ -155,9 +323,13 @@ class SmartObject(hass.Hass,LoggerInterface):
else:
self.log_error(f"Can't find the library app {library_name}")
# virtual_sensors: {...} → create derived sensors from YAML spec
if 'virtual_sensors' in self.args:
self.virtual_sensors = VirtualSensors(ad_api = self.get_ad_api(),logger_interface = self,super_entity_id = self.entity_id,yaml_block = self.args['virtual_sensors'],templates_library = self.templates_library,constants = self.constants)
# attributes_override: {attr: entity_id | static_value, ...}
# Each attribute either mirrors a live HA sensor or holds a
# static value set once at startup.
if 'attributes_override' in self.args:
self.attribute_sensors = dict()
new_attributes = dict()
@@ -175,7 +347,8 @@ class SmartObject(hass.Hass,LoggerInterface):
self.log_info(f"Overriding {self.entity_id} attributes with {new_attributes}")
self.entity.set_state(state = self.entity.get_state(), attributes = new_attributes)
#load dataset from disk
# Restore persisted dataset from disk (pickle). The file is
# created/updated on terminate() when self.dataset is not None.
try:
try: os.makedirs(self.get_dataset_folder())
except FileExistsError: pass
@@ -189,8 +362,8 @@ class SmartObject(hass.Hass,LoggerInterface):
except ParsingException as e: self.log_error(str(e),stop_app = True)
# Subclass hook — override this instead of initialize(); the base
# implementation is deliberately a no-op.
def on_initialize_smart_object(self): pass
# AppDaemon shutdown hook. Persists self.dataset to disk (if set),
# publishes MQTT offline availability, and cancels all state listeners.
def terminate(self):
self.event_dispatchers = None
self.virtual_sensors = None