Compare commits

...

6 Commits

9 changed files with 167 additions and 156 deletions

View File

@@ -1,4 +1,6 @@
import appdaemon.plugins.hass.hassapi as hass
import urllib.request
import json
# =============================================================================
# EventDispatcher / EventHandler — HA event subscription helpers
@@ -13,6 +15,11 @@ import appdaemon.plugins.hass.hassapi as hass
# event_data : dict of key/value pairs that must all match the event
# payload (deep partial match — nested dicts are matched
# recursively). None means "match any payload".
# Special key: ``device_name`` — for zha_event payloads you
# may supply the friendly ZHA device name instead of the raw
# IEEE address. It is resolved once at construction time via
# the ``zha/get_devices`` service and stored as
# ``device_ieee`` in the effective filter dict.
# reset_data : optional dict. When set, the dispatcher becomes a
# one-shot latch: the callback fires once on event_data
# match, then waits for a reset_data match before it can
@@ -51,13 +58,15 @@ import appdaemon.plugins.hass.hassapi as hass
class EventDispatcher:
def __init__(self,ad_api,event_name,callback,event_data,reset_data,event_context):
self.ad_api = ad_api
event_data = self._resolve_zha_device_name(event_data)
reset_data = self._resolve_zha_device_name(reset_data)
self.event_name = event_name
self.callback = callback
self.event_data = event_data
self.reset_data = reset_data
self.waiting_for_reset = False
self.event_context = event_context
self.ad_api = ad_api
if event_data == None:
self.ad_api.listen_event(self.on_event,event_name)
else:
@@ -78,6 +87,59 @@ class EventDispatcher:
    def on_event(self, event_name, data, kwargs):
        # AppDaemon listen_event callback. Forwards the raw event payload to
        # process_event(), which applies the filter/reset-latch logic.
        self.process_event(data)
def _lookup_zha_ieee(self, device_name):
# Resolve a ZHA device friendly name to its IEEE address by rendering
# a Jinja2 template via the HA REST API (/api/template).
# Retrieves the HA URL and token from the HASS plugin config object.
try:
plugin = self.ad_api.AD.plugins.get_plugin_object('default')
ha_url = str(plugin.config.ha_url).rstrip('/')
token = plugin.config.token.get_secret_value()
template = (
"{% set ns = namespace(ieee='') %}"
"{% for eid in integration_entities('zha') %}"
" {% set did = device_id(eid) %}"
" {% if did and not ns.ieee %}"
" {% if device_attr(did, 'name_by_user') == '" + device_name + "'"
" or device_attr(did, 'name') == '" + device_name + "' %}"
" {% for conn in device_attr(did, 'connections') %}"
" {% if conn[0] == 'zigbee' %}{% set ns.ieee = conn[1] %}{% endif %}"
" {% endfor %}"
" {% endif %}"
" {% endif %}"
"{% endfor %}"
"{{ ns.ieee }}"
)
payload = json.dumps({'template': template}).encode('utf-8')
req = urllib.request.Request(
f"{ha_url}/api/template",
data=payload,
headers={'Authorization': f'Bearer {token}', 'Content-Type': 'application/json'},
method='POST',
)
with urllib.request.urlopen(req, timeout=10) as resp:
result = resp.read().decode('utf-8').strip()
return result if result else None
except Exception as e:
self.ad_api.log_error(f"[EventDispatcher] Failed to resolve ZHA device name '{device_name}': {e}")
return None
def _resolve_zha_device_name(self, data):
# Replace a device_name key in data with device_ieee looked up from
# the ZHA device registry. Returns data unchanged when the key is
# absent or data is None.
if data is None or 'device_name' not in data:
return data
name = data['device_name']
ieee = self._lookup_zha_ieee(name)
resolved = {k: v for k, v in data.items() if k != 'device_name'}
if ieee is not None:
self.ad_api.log_info(f"[EventDispatcher] Resolved ZHA device name '{name}' to IEEE '{ieee}'")
resolved['device_ieee'] = ieee
else:
self.ad_api.log_error(f"[EventDispatcher] ZHA device '{name}' not found — 'device_name' filter ignored")
return resolved
def process_event(self,data):
def are_data_matching(ref_data, data):
if ref_data != None:
@@ -116,13 +178,14 @@ class EventHandler:
self.__ad_api = ad_api
self.event_dispatchers = []
if isinstance(events_block, list):
for event in events_block:
self.add_dispatcher(event,callback,event_context=event_context)
else:
for event_block in events_block.values():
register_event_with_params(event_block,callback,event_context)
    def log(self,message,**kwargs):
        # Forward log messages to the owning AppDaemon API object, passing
        # any keyword arguments (level, etc.) straight through.
        self.__ad_api.log(message,**kwargs)
def add_dispatcher(self,event_name,callback,event_data = None,reset_data = None ,event_context = None):
self.log(f'Registering dispatcher {callback.__name__} for event "{event_name}" ({event_data})')
self.__ad_api.log_info(f'Registering dispatcher {callback.__name__} for event "{event_name}" ({event_data})')
dispatcher = EventDispatcher(self.__ad_api,event_name,callback,event_data,reset_data,event_context)
self.event_dispatchers.append(dispatcher)

View File

@@ -1,8 +1,6 @@
import appdaemon.plugins.hass.hassapi as hass
import pickle
import os
import json
import re
from virtualsensors import VirtualSensors
from expressionparser import ParsingException
from logger_interface import LoggerInterface
@@ -20,8 +18,6 @@ from logger_interface import LoggerInterface
# - Templates library : shared named-template provider from another app
# - Virtual sensors : declarative derived sensors defined in YAML
# - Attributes override : map HA sensor states onto entity attributes
# - MQTT discovery : entities created via create_entity() are auto-
# published as HA MQTT discovery sensors
#
# YAML CONFIGURATION
# ------------------
@@ -68,15 +64,6 @@ from logger_interface import LoggerInterface
# - A HA entity_id → attribute tracks that entity's state live.
# - A static value → attribute is set once at startup.
#
# mqtt_device_name: <friendly name>
# Human-readable device name used in HA MQTT discovery payloads.
# Defaults to the AppDaemon app name (self.name) if omitted.
# Requires the MQTT integration to be available in HA.
# Used by the MQTT discovery feature (see create_entity()).
# When you create a new entity with create_entity(), it will be attached to a device through MQTT discovery with this name
# and will appear in HA as a child of that device.
#     Since MQTT retains these entities forever, you may need to clean up stale entities with MQTT Explorer if you rename or delete entities in your app.
#
# DATASET PERSISTENCE
# -------------------
# Subclasses can store arbitrary data in self.dataset (any pickle-able
@@ -97,8 +84,8 @@ class SmartObject(hass.Hass,LoggerInterface):
# Public API
# ------------------------------------------------------------------
# Create (or update) a HA entity and, when MQTT is available, register
# it with HA via MQTT discovery so it appears as a proper device entity.
# Create (or update) a HA entity and tag it with the originating app
# through the ad_app attribute.
#
# Parameters
# ----------
@@ -110,43 +97,29 @@ class SmartObject(hass.Hass,LoggerInterface):
# unit_of_measurement : e.g. "°C", "%", "W"
# device_class : HA sensor device class, e.g. "temperature"
# state_class : "measurement", "total", "total_increasing"
#
# The entity's live state is kept in sync with MQTT automatically via
# a listen_state subscription set up at registration time.
# Returns the AppDaemon entity handle.
def create_entity(self, entity_id, state=None, attributes=None, name=None, icon=None, unit_of_measurement=None, device_class=None, state_class=None):
def create_entity(self, entity_id, state=None, attributes=None, friendly_name=None, icon=None, unit_of_measurement=None, device_class=None, state_class=None):
entity = self.get_entity(entity_id, check_existence=False)
if self._mqtt_lazy_init() and entity_id not in self._mqtt['entities']:
node_id = self._mqtt['node_id']
obj = self._sanitize_for_topic(entity_id.split('.')[-1])
state_topic = f"appdaemon/{node_id}/{obj}/state"
availability_topic = f"appdaemon/{node_id}/{obj}/availability"
config = {
'name': name or entity_id,
'unique_id': f"{node_id}_{obj}",
'state_topic': state_topic,
'value_template': '{{ value_json.state }}',
'availability_topic': availability_topic,
'payload_available': 'online',
'payload_not_available': 'offline',
'device': self._mqtt['device'],
}
if icon: config['icon'] = icon
if unit_of_measurement: config['unit_of_measurement'] = unit_of_measurement
if device_class: config['device_class'] = device_class
if state_class: config['state_class'] = state_class
self._mqtt_publish(f"homeassistant/sensor/{node_id}/{obj}/config", config, retain=True)
self._mqtt_publish(availability_topic, 'online', retain=True)
self._mqtt['entities'].add(entity_id)
self._mqtt['handles'][entity_id] = self.listen_state(self._mqtt_sync_state, entity_id)
if state is not None and attributes is not None:
self.set_state(entity_id, state=state, attributes=attributes)
elif state is not None:
self.set_state(entity_id, state=state)
elif attributes is not None:
self.set_state(entity_id, attributes=attributes)
elif not entity.exists():
self.set_state(entity_id, state='unknown')
if attributes is not None:
attributes = dict(attributes)
else:
attributes = dict()
attributes['ad_app'] = self.name
if friendly_name is not None: attributes['friendly_name'] = friendly_name
if icon is not None: attributes['icon'] = icon
if unit_of_measurement is not None: attributes['unit_of_measurement'] = unit_of_measurement
if device_class is not None: attributes['device_class'] = device_class
if state_class is not None: attributes['state_class'] = state_class
if state is not None:
entity.set_state(state=state, attributes=attributes)
else:
if not entity.exists():
entity.set_state(state='unknown')
entity.set_state(attributes=attributes)
return entity
# Override this in a subclass to return a default state string when the
@@ -162,82 +135,6 @@ class SmartObject(hass.Hass,LoggerInterface):
# Called at the end of initialize() after all YAML args are processed.
def on_initialize_smart_object(self): pass
# ------------------------------------------------------------------
# MQTT Discovery (internal)
# ------------------------------------------------------------------
# Called lazily on the first create_entity() call. Checks whether the
# MQTT publish service is available and, if so, initialises self._mqtt
# with the device metadata used in discovery payloads.
# Returns True when MQTT is ready, False otherwise (MQTT not installed
# or the service is not available).
def _mqtt_lazy_init(self):
if hasattr(self, '_mqtt'):
return self._mqtt is not None
self._mqtt = None
try:
available = any(
i.get('domain') == 'mqtt' and i.get('service') == 'publish'
for i in self.list_services('global')
)
except Exception:
available = False
if not available:
return False
node_id = self._sanitize_for_topic(self.name)
device_name = self.args.get('mqtt_device_name', self.name)
self._mqtt = {
'node_id': node_id,
'device': {
'identifiers': [node_id],
'name': device_name,
'manufacturer': 'AppDaemon',
'model': self.__class__.__name__,
},
'entities': set(),
'handles': {},
}
return True
def _sanitize_for_topic(self, value):
return re.sub(r"[^a-zA-Z0-9_]+", "_", str(value)).strip("_").lower()
# Serialize payload to JSON if it is not already a string, then call
# the mqtt/publish service. retain=True is used for state and config
# topics so HA picks them up immediately after a broker restart.
def _mqtt_publish(self, topic, payload, retain=False):
if not isinstance(payload, str):
payload = json.dumps(payload)
self.call_service('mqtt/publish', topic=topic, payload=payload, retain=retain)
# listen_state callback — re-publishes the full state + attributes of
# entity_id to its MQTT state topic whenever the HA state changes.
    def _mqtt_sync_state(self, entity_id, attribute, old, new, kwargs):
        """Re-publish entity_id's full state + attributes to its MQTT state topic."""
        # Guard: ignore callbacks arriving after MQTT teardown or for
        # entities that were never registered via create_entity().
        if not getattr(self, '_mqtt', None) or entity_id not in self._mqtt['entities']:
            return
        # attribute='all' returns a dict with 'state' and 'attributes'; the
        # attributes are flattened next to 'state' into a single payload dict
        # (matching the value_template '{{ value_json.state }}' used in the
        # discovery config).
        state_data = self.get_state(entity_id, attribute='all') or {}
        payload = {'state': state_data.get('state')}
        payload.update(state_data.get('attributes', {}))
        node_id = self._mqtt['node_id']
        obj = self._sanitize_for_topic(entity_id.split('.')[-1])
        self._mqtt_publish(f"appdaemon/{node_id}/{obj}/state", payload, retain=True)
# Called from terminate(). Publishes "offline" availability for every
# registered entity so HA marks them as unavailable, then cancels all
# listen_state subscriptions created during create_entity().
def _mqtt_terminate(self):
if not getattr(self, '_mqtt', None):
return
for entity_id in self._mqtt['entities']:
node_id = self._mqtt['node_id']
obj = self._sanitize_for_topic(entity_id.split('.')[-1])
try: self._mqtt_publish(f"appdaemon/{node_id}/{obj}/availability", 'offline', retain=True)
except Exception: pass
for handle in self._mqtt['handles'].values():
try: self.cancel_listen_state(handle, silent=True)
except Exception: pass
# ------------------------------------------------------------------
# Dataset persistence helpers
# ------------------------------------------------------------------
@@ -325,7 +222,7 @@ class SmartObject(hass.Hass,LoggerInterface):
# virtual_sensors: {...} → create derived sensors from YAML spec
if 'virtual_sensors' in self.args:
self.virtual_sensors = VirtualSensors(ad_api = self.get_ad_api(),logger_interface = self,super_entity_id = self.entity_id,yaml_block = self.args['virtual_sensors'],templates_library = self.templates_library,constants = self.constants)
self.virtual_sensors = VirtualSensors(ad_api = self,logger_interface = self,super_entity_id = self.entity_id,yaml_block = self.args['virtual_sensors'],templates_library = self.templates_library,constants = self.constants,app_name = self.name)
# attributes_override: {attr: entity_id | static_value, ...}
# Each attribute either mirrors a live HA sensor or holds a
@@ -362,12 +259,10 @@ class SmartObject(hass.Hass,LoggerInterface):
except ParsingException as e: self.log_error(str(e),stop_app = True)
# AppDaemon shutdown hook. Persists self.dataset to disk (if set),
# publishes MQTT offline availability, and cancels all state listeners.
# AppDaemon shutdown hook. Persists self.dataset to disk (if set).
def terminate(self):
self.event_dispatchers = None
self.virtual_sensors = None
self._mqtt_terminate()
try: has_dataset = self.dataset != None
except AttributeError: has_dataset = False
if has_dataset:

30
virtualevents.py Normal file
View File

@@ -0,0 +1,30 @@
import appdaemon.plugins.hass.hassapi as hass
import smartcondition as SmartCondition
from expressionparser import ParsingException
from logger_interface import LoggerInterface
class VirtualEvents(hass.Hass,LoggerInterface):
    # Fires configurable HA events when declarative SmartCondition
    # expressions become true.
    #
    # YAML layout (per entry under 'virtual_events'):
    #   <event>:
    #     event_condition: <SmartCondition expression>
    #     event_name:      <HA event name to fire>
    #     event_data:      <optional key/value payload dict>
    def initialize(self):
        # AppDaemon entry point.
        self.initialize_logger_interface(self.get_ad_api())
        # Optional 'mute' arg is forwarded to LoggerInterface; a missing key
        # is simply ignored.
        try: self.mute_logger(self.args['mute'])
        except KeyError: pass
        self.virtual_events = dict()
        if "virtual_events" in self.args:
            for event in self.args["virtual_events"]:
                # One Evaluator per configured event. The event key is passed
                # as the callback argument (pass_condition_name_to_cb) so
                # on_condition_succeed() can look the YAML block back up.
                # A ParsingException skips only the offending entry.
                try: self.virtual_events[event] = SmartCondition.Evaluator(self,self.args["virtual_events"][event]['event_condition'],condition_name = event,on_succeed_cb = self.on_condition_succeed,pass_condition_name_to_cb = event, trigger_callback_on_activation = False, trigger_callback_on_entity_creation = False)
                except ParsingException as e:
                    self.log_error(str(e))
                    continue
    def on_condition_succeed(self,event_name):
        # Evaluator success callback — fire the configured HA event, with its
        # payload when 'event_data' is present.
        # NOTE(review): a missing 'event_name' key in the YAML block would
        # raise KeyError here — assumed to be validated upstream; confirm.
        event_yaml = self.args["virtual_events"][event_name]
        if 'event_data' in event_yaml:
            self.log(f"Sending event {event_yaml['event_name']} data = {event_yaml['event_data']}")
            self.fire_event(event_yaml['event_name'],**event_yaml['event_data'])
        else:
            self.log(f"Sending event {event_yaml['event_name']}")
            self.fire_event(event_yaml['event_name'])

View File

@@ -7,7 +7,7 @@ import time
from logger_interface import LoggerInterface
class VirtualSensorBase:
def __init__(self,ad_api,logger_interface, virtual_sensor_name = None, sensor_name = None,super_entity_id = None, yaml_block = None,templates_library = None, constants = None,self_initialize = False):
def __init__(self,ad_api,logger_interface, virtual_sensor_name = None, sensor_name = None,super_entity_id = None, yaml_block = None,templates_library = None, constants = None, app_name = None,self_initialize = False):
self.ad_api = ad_api
self.logger_interface = logger_interface
@@ -32,6 +32,9 @@ class VirtualSensorBase:
try: self.attributes = self.yaml_block['attributes']
except (TypeError,KeyError): self.attributes = dict()
if app_name:
self.attributes['ad_app'] = app_name
if constants:
self.constants = dict(constants) # we don't want to modify the parent dict
self.constants['self'] = self.sensor_name # we need to set self by ourself
@@ -554,7 +557,7 @@ class ValueSensor(VirtualSensorBase):
class VirtualSensors():
def __init__(self, ad_api = None,logger_interface = None, super_entity_id = None, yaml_block = None ,args_to_ignore_in_validation = [], constants = None, templates_library = None):
def __init__(self, ad_api = None,logger_interface = None, super_entity_id = None, yaml_block = None ,args_to_ignore_in_validation = [], constants = None, templates_library = None, app_name = None):
assert ad_api
assert yaml_block
assert logger_interface
@@ -572,20 +575,20 @@ class VirtualSensors():
averagers = parser.parse_args('averagers',{})
for averager in averagers:
self.virtual_sensors[f"sensor.{averager}"] = Averager(self.ad_api,self.logger_interface,averager,super_entity_id = super_entity_id,yaml_block = yaml_block['averagers'][averager],constants = constants, templates_library = templates_library)
self.virtual_sensors[f"sensor.{averager}"] = Averager(self.ad_api,self.logger_interface,averager,super_entity_id = super_entity_id,yaml_block = yaml_block['averagers'][averager],constants = constants, templates_library = templates_library,app_name = app_name)
continuous_conditions = parser.parse_args('continuous_conditions',{})
for continuous_condition in continuous_conditions:
self.virtual_sensors[f"binary_sensor.{continuous_condition}"] = ContinuousCondition(self.ad_api,self.logger_interface,continuous_condition,super_entity_id = super_entity_id,yaml_block = yaml_block['continuous_conditions'][continuous_condition],templates_library = templates_library)
self.virtual_sensors[f"binary_sensor.{continuous_condition}"] = ContinuousCondition(self.ad_api,self.logger_interface,continuous_condition,super_entity_id = super_entity_id,yaml_block = yaml_block['continuous_conditions'][continuous_condition],templates_library = templates_library,app_name = app_name)
binary_sensors = parser.parse_args('binary_sensors',{})
for binary_sensor in binary_sensors:
#self.logger_interface.log_info(f"Creating Binary Sensor: binary_sensor = {binary_sensor}, super_entity_id = {super_entity_id}, yaml_block = {yaml_block['binary_sensors']}")
self.virtual_sensors[f"binary_sensor.{binary_sensor}"] = BinarySensor(self.ad_api,self.logger_interface,binary_sensor,super_entity_id = super_entity_id,yaml_block = yaml_block['binary_sensors'][binary_sensor],constants = constants,templates_library = templates_library)
self.virtual_sensors[f"binary_sensor.{binary_sensor}"] = BinarySensor(self.ad_api,self.logger_interface,binary_sensor,super_entity_id = super_entity_id,yaml_block = yaml_block['binary_sensors'][binary_sensor],constants = constants,templates_library = templates_library,app_name = app_name)
value_selectors = parser.parse_args('value_selectors',{})
for value_selector in value_selectors:
self.virtual_sensors[f"sensor.{value_selector}"] = ValueSelector(self.ad_api,self.logger_interface,value_selector,super_entity_id = super_entity_id,yaml_block = yaml_block['value_selectors'][value_selector],constants = constants,templates_library = templates_library)
self.virtual_sensors[f"sensor.{value_selector}"] = ValueSelector(self.ad_api,self.logger_interface,value_selector,super_entity_id = super_entity_id,yaml_block = yaml_block['value_selectors'][value_selector],constants = constants,templates_library = templates_library,app_name = app_name)
sensors = parser.parse_args('sensors',{})
for sensor in sensors:
@@ -594,22 +597,20 @@ class VirtualSensors():
self.logger_interface.log_error(f"Invalid sensor name {splitted_sensor}")
if splitted_sensor[0] == 'binary_sensor':
self.virtual_sensors[f"binary_sensor.{splitted_sensor[1]}"] = BinarySensor(self.ad_api,self.logger_interface,splitted_sensor[1],super_entity_id = super_entity_id,yaml_block = yaml_block['sensors'][sensor],constants = constants,templates_library = templates_library)
self.virtual_sensors[f"binary_sensor.{splitted_sensor[1]}"] = BinarySensor(self.ad_api,self.logger_interface,splitted_sensor[1],super_entity_id = super_entity_id,yaml_block = yaml_block['sensors'][sensor],constants = constants,templates_library = templates_library,app_name = app_name)
elif splitted_sensor[0] == 'sensor':
self.virtual_sensors[f"sensor.{splitted_sensor[1]}"] = ValueSensor(self.ad_api,self.logger_interface,splitted_sensor[1],super_entity_id = super_entity_id,yaml_block = yaml_block['sensors'][sensor],constants = constants,templates_library = templates_library)
self.virtual_sensors[f"sensor.{splitted_sensor[1]}"] = ValueSensor(self.ad_api,self.logger_interface,splitted_sensor[1],super_entity_id = super_entity_id,yaml_block = yaml_block['sensors'][sensor],constants = constants,templates_library = templates_library,app_name = app_name)
elif splitted_sensor[0] == 'continuous_condition':
self.virtual_sensors[f"binary_sensor.{splitted_sensor[1]}"] = ContinuousCondition(self.ad_api,self.logger_interface,splitted_sensor[1],super_entity_id = super_entity_id,yaml_block = yaml_block['sensors'][sensor],constants = constants,templates_library = templates_library)
self.virtual_sensors[f"binary_sensor.{splitted_sensor[1]}"] = ContinuousCondition(self.ad_api,self.logger_interface,splitted_sensor[1],super_entity_id = super_entity_id,yaml_block = yaml_block['sensors'][sensor],constants = constants,templates_library = templates_library,app_name = app_name)
elif splitted_sensor[0] == 'averager':
self.virtual_sensors[f"sensor.{splitted_sensor[1]}"] = Averager(self.ad_api,self.logger_interface,splitted_sensor[1],super_entity_id = super_entity_id,yaml_block = yaml_block['sensors'][sensor],constants = constants,templates_library = templates_library)
self.virtual_sensors[f"sensor.{splitted_sensor[1]}"] = Averager(self.ad_api,self.logger_interface,splitted_sensor[1],super_entity_id = super_entity_id,yaml_block = yaml_block['sensors'][sensor],constants = constants,templates_library = templates_library,app_name = app_name)
elif splitted_sensor[0] == 'value_selector':
self.virtual_sensors[f"sensor.{splitted_sensor[1]}"] = ValueSelector(self.ad_api,self.logger_interface,splitted_sensor[1],super_entity_id = super_entity_id,yaml_block = yaml_block['sensors'][sensor],constants = constants,templates_library = templates_library)
self.virtual_sensors[f"sensor.{splitted_sensor[1]}"] = ValueSelector(self.ad_api,self.logger_interface,splitted_sensor[1],super_entity_id = super_entity_id,yaml_block = yaml_block['sensors'][sensor],constants = constants,templates_library = templates_library,app_name = app_name)
elif splitted_sensor[0] == 'retain_condition':
self.virtual_sensors[f"binary_sensor.{splitted_sensor[1]}"] = RetainCondition(self.ad_api,self.logger_interface,splitted_sensor[1],super_entity_id = super_entity_id,yaml_block = yaml_block['sensors'][sensor],constants = constants,templates_library = templates_library)
self.virtual_sensors[f"binary_sensor.{splitted_sensor[1]}"] = RetainCondition(self.ad_api,self.logger_interface,splitted_sensor[1],super_entity_id = super_entity_id,yaml_block = yaml_block['sensors'][sensor],constants = constants,templates_library = templates_library,app_name = app_name)
else:
self.logger_interface.log_error(f"Invalid sensor prefix {splitted_sensor[0]}")
parser.validate_args(args_to_ignore_in_validation)
dependencies_graph = dict()
for sensor_name, virtual_sensor in self.virtual_sensors.items():
try:
@@ -672,4 +673,4 @@ class VirtualSensors():
class VirtualSensorsApp(hass.Hass):
    # Standalone AppDaemon app wrapper: builds the VirtualSensors collection
    # directly from the app's YAML args.
    def initialize(self):
        self.logger_interface = LoggerInterface(self.get_ad_api(),default_log = "virtualsensors_log")
        # Single construction only — a duplicated assignment here built the
        # entire sensor set twice (with side effects) before the first result
        # was discarded.
        self.virtual_sensors = VirtualSensors(ad_api = self.get_ad_api(),logger_interface = self.logger_interface,yaml_block = self.args,args_to_ignore_in_validation = ['module','class','global_dependencies','priority','name','config_path'],app_name = self.name)

View File

@@ -86,6 +86,10 @@
"title": "AppDaemon: Refresh Entity List",
"icon": "$(sync)"
},
{
"command": "appdaemon.handleErrors",
"title": "AppDaemon: Handle Errors"
},
{
"command": "appdaemon.clearErrors",
"title": "AppDaemon: Clear Error Diagnostics"

View File

@@ -37,7 +37,10 @@ export class EntityCompletionProvider implements vscode.CompletionItemProvider {
const domain = dotMatch[1];
const partial = dotMatch[2];
if (this.cachedDomains.has(domain)) {
const items = this.buildDomainItems(entities, domain, partial);
// Range covers "domain.partial" so the full entity_id replaces it
const replaceStart = position.translate(0, -(domain.length + 1 + partial.length));
const replaceRange = new vscode.Range(replaceStart, position);
const items = this.buildDomainItems(entities, domain, partial, replaceRange);
if (items.length > 0) {
return new vscode.CompletionList(items, false);
}
@@ -80,7 +83,8 @@ export class EntityCompletionProvider implements vscode.CompletionItemProvider {
private buildDomainItems(
entities: HAEntity[],
domain: string,
partial: string
partial: string,
replaceRange: vscode.Range
): vscode.CompletionItem[] {
const items: vscode.CompletionItem[] = [];
const prefix = `${domain}.`;
@@ -105,6 +109,7 @@ export class EntityCompletionProvider implements vscode.CompletionItemProvider {
item.documentation = new vscode.MarkdownString(formatEntityMarkdown(entity));
item.filterText = entity.entity_id;
item.sortText = entity.entity_id;
item.range = replaceRange;
items.push(item);
}
return items;

View File

@@ -285,7 +285,18 @@ export async function activate(context: vscode.ExtensionContext) {
vscode.commands.registerCommand('appdaemon.showErrors', () => {
errorViewer.show();
}),
vscode.commands.registerCommand('appdaemon.handleErrors', async () => {
const pick = await vscode.window.showErrorMessage(
'AppDaemon errors detected',
'Show log',
'Clear'
);
if (pick === 'Show log') {
errorViewer.show();
} else if (pick === 'Clear') {
errorViewer.clearDiagnostics();
}
}),
vscode.commands.registerCommand('appdaemon.refreshEntities', async () => {
const entities = await haClient.fetchEntities();
vscode.window.showInformationMessage(`AppDaemon: ${entities.length} entities loaded`);

View File

@@ -78,11 +78,13 @@ export class StatusBarManager {
if (count > 0) {
this.errorItem.text = `$(warning) AD Errors (${count})`;
this.errorItem.backgroundColor = new vscode.ThemeColor('statusBarItem.errorBackground');
this.errorItem.tooltip = `${count} error(s) in AppDaemon log — click to view`;
this.errorItem.tooltip = `${count} error(s) — click to show or clear`;
this.errorItem.command = 'appdaemon.handleErrors';
} else {
this.errorItem.text = '$(check) AD Errors';
this.errorItem.backgroundColor = undefined;
this.errorItem.tooltip = 'No AppDaemon errors — click to view log';
this.errorItem.command = 'appdaemon.showErrors';
}
}