# File: ad_toolbox/smartobject.py  (388 lines, 19 KiB, Python)
import appdaemon.plugins.hass.hassapi as hass
import pickle
import os
import json
import re
from virtualsensors import VirtualSensors
from expressionparser import ParsingException
from logger_interface import LoggerInterface
# =============================================================================
# SmartObject — Base class for all ad_toolbox AppDaemon apps
# =============================================================================
# Inherit from SmartObject instead of hass.Hass to get all features below.
#
# FEATURES
# --------
# - Entity linking : bind the app to a single HA entity (self.entity)
# - Dataset persistence : self.dataset is auto-loaded/saved to disk (pickle)
# - Constants : key/value dict injected into expression contexts
# - Templates library : shared named-template provider from another app
# - Virtual sensors : declarative derived sensors defined in YAML
# - Attributes override : map HA sensor states onto entity attributes
# - MQTT discovery : entities created via create_entity() are auto-
# published as HA MQTT discovery sensors
#
# YAML CONFIGURATION
# ------------------
# All keys are optional unless noted.
#
# entity: <entity_id>
# HA entity this app is linked to (e.g. "sensor.my_sensor").
# Available as self.entity_id and self.entity after initialize().
# Also injected into the expression context as the constant "self".
#
# default_entity_state: <state_string>
# Initial state to set when entity does not yet exist in HA.
# If omitted and the entity is missing, get_default_entity_state() is
# called (raises an error by default — override it in subclasses).
#
# trace_events: <ha_event_name | list of ha_event_names>
# Log every occurrence of the named HA event(s) to the app log.
# Useful during development to inspect event payloads.
# Example: trace_events: MY_BUTTON_EVENT
# Example: trace_events: [MY_BUTTON_EVENT, ios.action_fired]
#
# mute: true
# Suppress all log output from this app instance.
#
# constants:
# <key>: <value>
# ...
# Arbitrary key/value pairs injected into expression and template
# contexts. "self" is always added automatically as the entity_id.
#
# templates_library: <app_name>
# Name of another SmartObject app that exposes a template library.
#
# virtual_sensors:
# <sensor_id>:
# ...
# Declarative virtual sensor definitions passed to VirtualSensors.
# See virtualsensors.py for the full sub-schema.
#
# attributes_override:
# <attribute_name>: <entity_id | static_value>
# ...
# Override attributes on self.entity. Each value can be either:
# - A HA entity_id → attribute tracks that entity's state live.
# - A static value → attribute is set once at startup.
#
# mqtt_device_name: <friendly name>
# Human-readable device name used in HA MQTT discovery payloads.
# Defaults to the AppDaemon app name (self.name) if omitted.
# Requires the MQTT integration to be available in HA.
# Used by the MQTT discovery feature (see create_entity()).
# When you create a new entity with create_entity(), it is attached through
# MQTT discovery to a device with this name and appears in HA as a child of
# that device.
# Because MQTT retains those entities indefinitely, you may need to clean up
# stale entities with MQTT Explorer if you rename or delete entities in your app.
#
# DATASET PERSISTENCE
# -------------------
# Subclasses can store arbitrary data in self.dataset (any pickle-able
# Python object). It is automatically:
# - Loaded from apps/data/<appname>.dataset at startup (if the file exists)
# - Saved to apps/data/<appname>.dataset on terminate (if not None)
#
# SUBCLASSING
# -----------
# Override on_initialize_smart_object() instead of initialize().
# Override get_default_entity_state() to provide a default state when the
# linked entity does not exist and no default_entity_state arg is given.
# =============================================================================
class SmartObject(hass.Hass,LoggerInterface):
# ------------------------------------------------------------------
# Public API
# ------------------------------------------------------------------
# Create (or update) a HA entity and, when MQTT is available, register
# it with HA via MQTT discovery so it appears as a proper device entity.
#
# Parameters
# ----------
# entity_id : full HA entity id, e.g. "sensor.my_value"
# state : initial state string (optional)
# attributes : dict of initial attributes (optional)
# name : friendly name shown in HA UI
# icon : MDI icon string, e.g. "mdi:thermometer"
# unit_of_measurement : e.g. "°C", "%", "W"
# device_class : HA sensor device class, e.g. "temperature"
# state_class : "measurement", "total", "total_increasing"
#
# The entity's live state is kept in sync with MQTT automatically via
# a listen_state subscription set up at registration time.
# Returns the AppDaemon entity handle.
def create_entity(self, entity_id, state=None, attributes=None, name=None, icon=None, unit_of_measurement=None, device_class=None, state_class=None):
entity = self.get_entity(entity_id, check_existence=False)
if self._mqtt_lazy_init() and entity_id not in self._mqtt['entities']:
node_id = self._mqtt['node_id']
obj = self._sanitize_for_topic(entity_id.split('.')[-1])
state_topic = f"appdaemon/{node_id}/{obj}/state"
availability_topic = f"appdaemon/{node_id}/{obj}/availability"
config = {
'name': name or entity_id,
'unique_id': f"{node_id}_{obj}",
'state_topic': state_topic,
'value_template': '{{ value_json.state }}',
'availability_topic': availability_topic,
'payload_available': 'online',
'payload_not_available': 'offline',
'device': self._mqtt['device'],
}
if icon: config['icon'] = icon
if unit_of_measurement: config['unit_of_measurement'] = unit_of_measurement
if device_class: config['device_class'] = device_class
if state_class: config['state_class'] = state_class
self._mqtt_publish(f"homeassistant/sensor/{node_id}/{obj}/config", config, retain=True)
self._mqtt_publish(availability_topic, 'online', retain=True)
self._mqtt['entities'].add(entity_id)
self._mqtt['handles'][entity_id] = self.listen_state(self._mqtt_sync_state, entity_id)
if state is not None and attributes is not None:
self.set_state(entity_id, state=state, attributes=attributes)
elif state is not None:
self.set_state(entity_id, state=state)
elif attributes is not None:
self.set_state(entity_id, attributes=attributes)
elif not entity.exists():
self.set_state(entity_id, state='unknown')
return entity
# Override this in a subclass to return a default state string when the
# linked entity does not exist and no "default_entity_state" arg was set.
# The base implementation raises a fatal error.
def get_default_entity_state(self): self.log_error(f"{self.entity_id} doesn't exist. Please use default_entity_state if you wan't to create one or override get_default_entity_state()",True)
# Logs any HA event registered via trace_events with its full payload.
def _on_trace_event(self, event_name, data, kwargs):
self.log_info(f"[trace] event '{event_name}' fired. data = {data}")
# Override this in subclasses instead of initialize().
# Called at the end of initialize() after all YAML args are processed.
def on_initialize_smart_object(self): pass
# ------------------------------------------------------------------
# MQTT Discovery (internal)
# ------------------------------------------------------------------
# Called lazily on the first create_entity() call. Checks whether the
# MQTT publish service is available and, if so, initialises self._mqtt
# with the device metadata used in discovery payloads.
# Returns True when MQTT is ready, False otherwise (MQTT not installed
# or the service is not available).
def _mqtt_lazy_init(self):
if hasattr(self, '_mqtt'):
return self._mqtt is not None
self._mqtt = None
try:
available = any(
i.get('domain') == 'mqtt' and i.get('service') == 'publish'
for i in self.list_services('global')
)
except Exception:
available = False
if not available:
return False
node_id = self._sanitize_for_topic(self.name)
device_name = self.args.get('mqtt_device_name', self.name)
self._mqtt = {
'node_id': node_id,
'device': {
'identifiers': [node_id],
'name': device_name,
'manufacturer': 'AppDaemon',
'model': self.__class__.__name__,
},
'entities': set(),
'handles': {},
}
return True
def _sanitize_for_topic(self, value):
return re.sub(r"[^a-zA-Z0-9_]+", "_", str(value)).strip("_").lower()
# Serialize payload to JSON if it is not already a string, then call
# the mqtt/publish service. retain=True is used for state and config
# topics so HA picks them up immediately after a broker restart.
def _mqtt_publish(self, topic, payload, retain=False):
if not isinstance(payload, str):
payload = json.dumps(payload)
self.call_service('mqtt/publish', topic=topic, payload=payload, retain=retain)
# listen_state callback — re-publishes the full state + attributes of
# entity_id to its MQTT state topic whenever the HA state changes.
def _mqtt_sync_state(self, entity_id, attribute, old, new, kwargs):
if not getattr(self, '_mqtt', None) or entity_id not in self._mqtt['entities']:
return
state_data = self.get_state(entity_id, attribute='all') or {}
payload = {'state': state_data.get('state')}
payload.update(state_data.get('attributes', {}))
node_id = self._mqtt['node_id']
obj = self._sanitize_for_topic(entity_id.split('.')[-1])
self._mqtt_publish(f"appdaemon/{node_id}/{obj}/state", payload, retain=True)
# Called from terminate(). Publishes "offline" availability for every
# registered entity so HA marks them as unavailable, then cancels all
# listen_state subscriptions created during create_entity().
def _mqtt_terminate(self):
if not getattr(self, '_mqtt', None):
return
for entity_id in self._mqtt['entities']:
node_id = self._mqtt['node_id']
obj = self._sanitize_for_topic(entity_id.split('.')[-1])
try: self._mqtt_publish(f"appdaemon/{node_id}/{obj}/availability", 'offline', retain=True)
except Exception: pass
for handle in self._mqtt['handles'].values():
try: self.cancel_listen_state(handle, silent=True)
except Exception: pass
# ------------------------------------------------------------------
# Dataset persistence helpers
# ------------------------------------------------------------------
# Returns the path to the shared data folder: <app_dir>/data/
def get_dataset_folder(self): return os.path.join(str(self.AD.app_dir),"data")
# Returns the full path of this app's dataset file:
# <app_dir>/data/<appname>.dataset
def get_dataset_name(self):
return os.path.join(self.get_dataset_folder(),f"{self.name}.dataset")
# ------------------------------------------------------------------
# Constants
# ------------------------------------------------------------------
# Merge constants_dict into self.constants. Constants are available
# inside expression / template evaluation contexts.
def add_constants(self,constants_dict):
self.log_info(f"Declaring constants {constants_dict}")
self.constants.update(constants_dict)
# ------------------------------------------------------------------
# AppDaemon lifecycle
# ------------------------------------------------------------------
# Main AppDaemon entry point. Processes all YAML args in order:
# mute → entity → constants → templates_library → virtual_sensors
# → attributes_override → dataset restore → on_initialize_smart_object()
def initialize(self):
try:
#self.depends_on_module(["smartobject","virtualsensors","logger_interface"])
self.initialize_logger_interface(self.get_ad_api())
self.log_info(f"Name = {self.name}")
# mute: true → suppress all log output for this app
try: self.mute_logger(self.args['mute'])
except KeyError: pass
self.event_dispatchers = dict()
self.constants = dict()
self.dataset = None
# trace_events: <event | list> → log event payloads for debugging
if 'trace_events' in self.args:
event_names = self.args['trace_events']
if not isinstance(event_names, list):
event_names = [event_names]
for event_name in event_names:
self.log_info(f"Tracing event '{event_name}'")
self.listen_event(self._on_trace_event, event_name)
# entity: <entity_id> → link app to a HA entity
if "entity" in self.args:
self.entity_id = self.args["entity"]
self.entity = self.get_entity(self.entity_id)
if self.entity.exists():
self.log_info(f"Linked to {self.entity_id}")
elif 'default_entity_state' in self.args:
self.entity.set_state(state = self.args["default_entity_state"])
self.log_info(f"Creating {self.entity_id}, default_state = {self.args['default_entity_state']}")
else:
self.entity.set_state(state = self.get_default_entity_state())
self.constants['self'] = self.entity_id
else:
self.entity_id = None
self.entity = None
# constants: {key: value, ...} → add to expression context
if 'constants' in self.args:
self.add_constants(self.args['constants'])
# templates_library: <app_name> → load shared template provider
self.templates_library = None
if 'templates_library' in self.args:
library_name = self.args['templates_library']
library_app = self.get_app(library_name)
if library_app:
self.listen_event(self.on_template_library_loaded,'template_library_loaded', library_name = library_name)
self.templates_library = library_app.get_template_library()
else:
self.log_error(f"Can't find the library app {library_name}")
# virtual_sensors: {...} → create derived sensors from YAML spec
if 'virtual_sensors' in self.args:
self.virtual_sensors = VirtualSensors(ad_api = self.get_ad_api(),logger_interface = self,super_entity_id = self.entity_id,yaml_block = self.args['virtual_sensors'],templates_library = self.templates_library,constants = self.constants)
# attributes_override: {attr: entity_id | static_value, ...}
# Each attribute either mirrors a live HA sensor or holds a
# static value set once at startup.
if 'attributes_override' in self.args:
self.attribute_sensors = dict()
new_attributes = dict()
for attribute in self.args['attributes_override']:
attribute_sensor = self.args['attributes_override'][attribute]
if self.entity_exists(attribute_sensor):
self.log_info(f"Registering sensor {attribute_sensor} for attribute {attribute}")
self.attribute_sensors[attribute_sensor] = attribute
self.listen_state(self.on_attribute_sensor_changed,attribute_sensor)
attribute_value = self.get_state(attribute_sensor)
else:
attribute_value = self.args['attributes_override'][attribute]
new_attributes[attribute] = attribute_value
self.log_info(f"Overriding {self.entity_id} attributes with {new_attributes}")
self.entity.set_state(state = self.entity.get_state(), attributes = new_attributes)
# Restore persisted dataset from disk (pickle). The file is
# created/updated on terminate() when self.dataset is not None.
try:
try: os.makedirs(self.get_dataset_folder())
except FileExistsError: pass
f = open(self.get_dataset_name(), 'rb')
self.dataset = pickle.load(f)
f.close()
self.log_info(self.get_dataset_name() + " loaded")
except (FileNotFoundError,EOFError): pass #self.log_info("File " + self.get_dataset_name() + " not found (and it's ok)")
self.on_initialize_smart_object()
except ParsingException as e: self.log_error(str(e),stop_app = True)
# AppDaemon shutdown hook. Persists self.dataset to disk (if set),
# publishes MQTT offline availability, and cancels all state listeners.
def terminate(self):
self.event_dispatchers = None
self.virtual_sensors = None
self._mqtt_terminate()
try: has_dataset = self.dataset != None
except AttributeError: has_dataset = False
if has_dataset:
self.log_info("Writing dataset to " + self.get_dataset_name())
f = open(self.get_dataset_name(), 'wb')
pickle.dump(self.dataset, f)
f.close()
def on_template_library_loaded(self, event_name, data, kwargs):
self.log_info(f"Restarting app to reload new template")
self.restart_app(self.name)
def on_attribute_sensor_changed(self, entity, attribute, old, new, kwargs):
if new != old:
new_attributes = { self.attribute_sensors[entity] : new}
self.log_info(f"Overriding {self.entity_id} attributes with {new_attributes}")
self.entity.set_state(attributes = new_attributes)