Skip to content
Draft
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@ faust-cchardet==2.1.19
gitpython==3.1.44
jinja2==3.1.6
log-rate-limit==1.4.2
logging-journald==0.6.11
orjson==3.10.18
pulsectl==24.12.0
pyudev==0.24.3
Expand Down
38 changes: 25 additions & 13 deletions supervisor/bootstrap.py
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,7 @@
from .store import StoreManager
from .supervisor import Supervisor
from .updater import Updater
from .utils.logging import HAOSLogHandler
from .utils.sentry import capture_exception, init_sentry

_LOGGER: logging.Logger = logging.getLogger(__name__)
Expand Down Expand Up @@ -246,20 +247,31 @@ def initialize_logging() -> None:
# suppress overly verbose logs from libraries that aren't helpful
logging.getLogger("aiohttp.access").setLevel(logging.WARNING)

logging.getLogger().handlers[0].setFormatter(
ColoredFormatter(
colorfmt,
datefmt=datefmt,
reset=True,
log_colors={
"DEBUG": "cyan",
"INFO": "green",
"WARNING": "yellow",
"ERROR": "red",
"CRITICAL": "red",
},
default_handler = logging.getLogger().handlers[0]

if HAOSLogHandler.is_available():
# replace the default logging handler with JournaldLogHandler
logging.getLogger().removeHandler(default_handler)
supervisor_name = os.environ[ENV_SUPERVISOR_NAME]
journald_handler = HAOSLogHandler(identifier=supervisor_name)
journald_handler.setFormatter(logging.Formatter(fmt, datefmt=datefmt))
logging.getLogger().addHandler(journald_handler)
else:
default_handler.setFormatter(
ColoredFormatter(
colorfmt,
datefmt=datefmt,
reset=True,
log_colors={
"DEBUG": "cyan",
"INFO": "green",
"WARNING": "yellow",
"ERROR": "red",
"CRITICAL": "red",
},
)
)
)

warnings.showwarning = warning_handler


Expand Down
2 changes: 1 addition & 1 deletion supervisor/host/sound.py
Original file line number Diff line number Diff line change
Expand Up @@ -339,7 +339,7 @@ def _get_pulse_data() -> PulseData:
profile.name == card.profile_active.name,
)
for profile in card.profile_list
if profile.available
if profile.is_available
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Huh, this is probably side-effect of the IDE refactoring tool, shouldn't be here.

],
)
for card in pulse.card_list()
Expand Down
48 changes: 48 additions & 0 deletions supervisor/utils/logging.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@
import queue
from typing import Any

from logging_journald import Facility, JournaldLogHandler


class AddonLoggerAdapter(logging.LoggerAdapter):
"""Logging Adapter which prepends log entries with add-on name."""
Expand Down Expand Up @@ -59,6 +61,52 @@
self.listener = None


class HAOSLogHandler(JournaldLogHandler):
    """Log handler for writing logs to the Home Assistant OS Systemd Journal."""

    # Facility used to tag Supervisor's own journal entries; readers use it to
    # recognize "our" records when formatting journal output.
    SYSLOG_FACILITY = Facility.LOCAL7

    def __init__(self, identifier: str | None = None) -> None:
        """Initialize the HAOS log handler.

        identifier: value for the SYSLOG_IDENTIFIER journal field.
        """
        super().__init__(identifier=identifier, facility=HAOSLogHandler.SYSLOG_FACILITY)
        self._container_id = self._get_container_id()

    @staticmethod
    def _get_container_id() -> str | None:
        """Get the container ID if running inside a Docker container."""
        # Currently we only have this hacky way of getting the container ID,
        # we (probably) cannot get it without having some cgroup namespaces
        # mounted in the container or passed it there using other means.
        # Not obtaining it will only result in the logs not being available
        # through `docker logs` command, so it is not a critical issue.
        with open("/proc/self/mountinfo", encoding="utf-8") as f:
            for line in f:
                if "/docker/containers/" in line:
                    container_id = line.split("/docker/containers/")[-1]
                    return container_id.split("/")[0]
        return None

    @classmethod
    def is_available(cls) -> bool:
        """Check if the HAOS log handler can be used."""
        return cls.SOCKET_PATH.exists()

    def emit(self, record: logging.LogRecord) -> None:
        """Emit formatted log record to the Systemd Journal.

        If CONTAINER_ID is known, add it to the fields to make the log record
        available through `docker logs` command.
        """
        try:
            formatted = self._format_record(record)
            if self._container_id:
                # only container ID is needed for interpretation through `docker logs`
                formatted.append(("CONTAINER_ID", self._container_id))
            self.transport.send(formatted)
        except Exception:  # pylint: disable=broad-exception-caught
            # A logging handler must never let a logging failure propagate;
            # delegate to the base class fallback handling instead.
            self._fallback(record)


def activate_log_queue_handler() -> None:
"""Migrate the existing log handlers to use the queue.
Expand Down
41 changes: 39 additions & 2 deletions supervisor/utils/systemd_journal.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,25 @@
from datetime import UTC, datetime
from functools import wraps
import json
import logging
from types import MappingProxyType

from aiohttp import ClientResponse
from colorlog.escape_codes import escape_codes, parse_colors

from supervisor.exceptions import MalformedBinaryEntryError
from supervisor.host.const import LogFormatter
from supervisor.utils.logging import HAOSLogHandler

# Map journald priority values (the keys HAOSLogHandler.LEVELS assigns to the
# standard Python log levels) to colorlog color names, used to colorize
# Supervisor's own entries when rendering journal output. Read-only mapping.
_LOG_COLORS = MappingProxyType(
    {
        HAOSLogHandler.LEVELS[logging.DEBUG]: "cyan",
        HAOSLogHandler.LEVELS[logging.INFO]: "green",
        HAOSLogHandler.LEVELS[logging.WARNING]: "yellow",
        HAOSLogHandler.LEVELS[logging.ERROR]: "red",
        HAOSLogHandler.LEVELS[logging.CRITICAL]: "red",
    }
)


def formatter(required_fields: list[str]):
Expand All @@ -24,15 +38,34 @@ def decorator(func):
def wrapper(*args, **kwargs):
return func(*args, **kwargs)

wrapper.required_fields = ["__CURSOR"] + required_fields
implicit_fields = ["__CURSOR", "SYSLOG_FACILITY", "PRIORITY"]
wrapper.required_fields = implicit_fields + required_fields
return wrapper

return decorator


def _entry_is_haos_log(entry: dict[str, str]) -> bool:
    """Check if the entry is a Home Assistant Operating System log entry."""
    facility = entry.get("SYSLOG_FACILITY")
    return facility == str(HAOSLogHandler.SYSLOG_FACILITY)
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

It appears that nothing else is using this facility in HAOS currently so it's fairly reliable indicator it's "our" logs.. Facility.LOCAL7 == 23 is the default in JournaldLogHandler as well but I defined it on the HAOSLogHandler class to be explicit and stable.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Hm, I see this is syslog backward compatibility, and the numbers are from RFC5424, interesting.

It seems in my logs that quite some entries omit any SYSLOG_FACILITY. I wonder if we really want to use that as the means to identify our logs? 🤔

I see the library currently maps the standard logging priorities to the systemd-journald priority levels, maybe it would be better to store the Python logging level (which also can have custom levels) as a separate field (e.,g. PYTHON_LEVEL), and use that as a means to detect "our" logs? 🤔 I guess it would involve some changes to the library, but could be a good way to test how active that upstream project is.



def colorize_message(message: str, priority: str | None = None) -> str:
    """Colorize a log message using ANSI escape codes based on its priority."""
    # Without a numeric priority we cannot pick a color — return unchanged.
    if not priority or not priority.isdigit():
        return message
    color = _LOG_COLORS.get(int(priority))
    if color is None:
        return message
    return f"{parse_colors(color)}{message}{escape_codes['reset']}"


@formatter(["MESSAGE"])
def journal_plain_formatter(entries: dict[str, str]) -> str:
    """Format parsed journal entries as a plain message."""
    message = entries["MESSAGE"]
    # Only Supervisor's own entries get colorized by their priority.
    if _entry_is_haos_log(entries):
        message = colorize_message(message, entries.get("PRIORITY"))
    return message


Expand All @@ -58,7 +91,11 @@ def journal_verbose_formatter(entries: dict[str, str]) -> str:
else entries.get("SYSLOG_IDENTIFIER", "_UNKNOWN_")
)

return f"{ts} {entries.get('_HOSTNAME', '')} {identifier}: {entries.get('MESSAGE', '')}"
message = entries.get("MESSAGE", "")
if message and _entry_is_haos_log(entries):
message = colorize_message(message, entries.get("PRIORITY"))

return f"{ts} {entries.get('_HOSTNAME', '')} {identifier}: {message}"


async def journal_logs_reader(
Expand Down
Loading