2024-01-22 11:35:13 +01:00
|
|
|
from __future__ import annotations
|
|
|
|
|
2019-09-25 14:26:29 +02:00
|
|
|
import logging
|
|
|
|
import os
|
2020-08-10 00:02:07 +02:00
|
|
|
import re
|
2019-09-25 14:26:29 +02:00
|
|
|
import sys
|
2024-08-04 11:08:43 +02:00
|
|
|
from typing import Any, Dict, List, Optional, TypeVar
|
2020-08-10 00:02:07 +02:00
|
|
|
|
2022-08-08 16:43:05 +02:00
|
|
|
import pytimeparse2
|
2020-08-10 00:02:07 +02:00
|
|
|
import yaml
|
|
|
|
|
2022-07-09 12:22:05 +02:00
|
|
|
from matrix_alertbot.errors import (
|
|
|
|
InvalidConfigError,
|
|
|
|
ParseConfigError,
|
|
|
|
RequiredConfigKeyError,
|
|
|
|
)
|
2019-09-25 14:26:29 +02:00
|
|
|
|
|
|
|
# Root logger for the whole bot; its handlers and levels are attached later
# by Config._parse_config_values based on the loaded config file.
logger = logging.getLogger()

logging.getLogger("peewee").setLevel(
    logging.INFO
)  # Prevent debug messages from peewee lib
|
2019-09-25 14:26:29 +02:00
|
|
|
|
|
|
|
|
2024-04-17 19:22:42 +02:00
|
|
|
# Default set of emoji reactions the bot accepts on alert messages.
# Used as the fallback for the matrix.allowed_reactions config option
# (see Config._parse_config_values).
DEFAULT_REACTIONS = {
    "🤫",
    "😶",
    "🤐",
    "🙊",
    "🔇",
    "🔕",
    "🚮",
    "⛔",
    "🚫",
    "🤬",
    "🫥",
    "😶🌫️",
    "🫣",
    "🫢",
    "😪",
    "😴",
    "💤",
    "🥱",
    "🤌",
    "🤏",
    "🤚",
    "👎",
    "🖕",
}
|
|
|
|
|
|
|
|
# Subset of reactions treated as "insult" reactions.
# Used as the fallback for the matrix.insult_reactions config option
# (see Config._parse_config_values).
INSULT_REACTIONS = {
    "🤬",
    "🤌",
    "🖕",
}
|
2022-07-28 17:39:47 +02:00
|
|
|
|
2024-08-04 11:08:43 +02:00
|
|
|
K = TypeVar("K")
V = TypeVar("V")


class BiDict(dict[K, V]):
    """A dict that also maintains an inverse index from values to key sets.

    ``self.inverse`` maps each value to the set of keys currently bound to
    it, and is kept up to date on every assignment and deletion.  Values
    therefore must be hashable.  Entries whose key set becomes empty are
    removed from ``inverse`` so it only ever contains live values.
    """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        # Inverse index: value -> set of keys currently mapping to it.
        self.inverse: Dict[V, set[K]] = {}
        for key, value in self.items():
            self.inverse.setdefault(value, set()).add(key)

    def __setitem__(self, key: K, value: V) -> None:
        if key in self:
            # Un-index the previous value.  Drop its entry entirely once no
            # key maps to it any more — previously an empty set was left
            # behind here, inconsistent with __delitem__'s cleanup.
            old_value = self[key]
            self.inverse[old_value].remove(key)
            if not self.inverse[old_value]:
                del self.inverse[old_value]
        super().__setitem__(key, value)
        self.inverse.setdefault(value, set()).add(key)

    def __delitem__(self, key: K) -> None:
        value = self[key]
        self.inverse.setdefault(value, set()).remove(key)
        # Remove the inverse entry once its key set is empty.
        if value in self.inverse and not self.inverse[value]:
            del self.inverse[value]
        super().__delitem__(key)
|
|
|
|
|
2022-07-28 17:39:47 +02:00
|
|
|
|
2024-01-22 11:35:13 +01:00
|
|
|
class AccountConfig:
    """Credentials and connection settings for a single Matrix account.

    Built from one entry of the ``matrix.accounts`` config list.  Missing
    required keys surface as ``KeyError`` (reported by the caller); invalid
    values raise config errors directly.
    """

    def __init__(self, account: Dict[str, str]) -> None:
        # Fully-qualified Matrix user ID, e.g. "@bot:example.org".
        self.id: str = account["id"]
        if re.match("@.+:.+", self.id) is None:
            raise InvalidConfigError("matrix.user_id must be in the form @name:domain")

        # Either a password or an access token must be configured.
        self.password: Optional[str] = account.get("password")
        self.token: Optional[str] = account.get("token")
        if self.password is None and self.token is None:
            raise RequiredConfigKeyError("Must supply either user token or password")

        # Optional device ID and the file where the session token is stored.
        self.device_id: Optional[str] = account.get("device_id")
        self.token_file: str = account.get("token_file", "token.json")

        # Base URL of the homeserver this account logs in to.
        self.homeserver_url: str = account["url"]

    def __repr__(self) -> str:
        class_name = type(self).__name__
        return f"{class_name}({self.id})"
|
|
|
|
|
|
|
|
|
2021-01-10 04:30:07 +01:00
|
|
|
class Config:
    """Creates a Config object from a YAML-encoded config file from a given filepath"""

    def __init__(self, filepath: str):
        """Load and validate the config file at ``filepath``.

        Raises:
            ParseConfigError: If the file does not exist or an option is
                missing or invalid (see subclasses raised below).
        """
        self.filepath = filepath
        if not os.path.isfile(filepath):
            raise ParseConfigError(f"Config file '{filepath}' does not exist")

        # Load in the config file at the given filepath
        with open(filepath) as file_stream:
            self.config_dict = yaml.safe_load(file_stream.read())

        # Parse and validate config options
        self._parse_config_values()

    def _parse_config_values(self) -> None:
        """Read and validate each config option"""
        # Logging setup
        formatter = logging.Formatter(
            "%(asctime)s | %(name)s [%(levelname)s] %(message)s"
        )

        # this must be DEBUG to allow debug messages
        # actual log levels are defined in the handlers below
        logger.setLevel("DEBUG")

        file_logging_enabled = self._get_cfg(
            ["logging", "file_logging", "enabled"], default=False
        )
        file_logging_filepath = self._get_cfg(
            ["logging", "file_logging", "filepath"], default="matrix-alertbot.log"
        )
        file_logging_log_level = self._get_cfg(
            ["logging", "file_logging", "level"], default="INFO"
        )
        if file_logging_enabled:
            file_handler = logging.FileHandler(file_logging_filepath)
            file_handler.setFormatter(formatter)
            if file_logging_log_level:
                file_handler.setLevel(file_logging_log_level)
            logger.addHandler(file_handler)

        console_logging_enabled = self._get_cfg(
            ["logging", "console_logging", "enabled"], default=True
        )
        console_logging_log_level = self._get_cfg(
            ["logging", "console_logging", "level"], default="INFO"
        )
        if console_logging_enabled:
            console_handler = logging.StreamHandler(sys.stdout)
            console_handler.setFormatter(formatter)
            if console_logging_log_level:
                console_handler.setLevel(console_logging_log_level)
            logger.addHandler(console_handler)

        # Storage setup
        self.store_dir: str = self._get_cfg(["storage", "path"], required=True)

        # Create the store folder if it doesn't exist
        if not os.path.isdir(self.store_dir):
            if not os.path.exists(self.store_dir):
                # NOTE: os.mkdir requires the parent directory to exist.
                os.mkdir(self.store_dir)
            else:
                raise InvalidConfigError(
                    f"storage.path '{self.store_dir}' is not a directory"
                )

        # Template setup (optional directory with message templates)
        self.template_dir: Optional[str] = self._get_cfg(
            ["template", "path"], required=False
        )

        # Cache setup
        self.cache_dir: str = self._get_cfg(["cache", "path"], required=True)
        expire_time: str = self._get_cfg(["cache", "expire_time"], default="1w")
        # Human-readable duration (e.g. "1w") parsed into seconds.
        self.cache_expire_time = pytimeparse2.parse(expire_time)

        # Alertmanager client setup
        self.alertmanager_url: str = self._get_cfg(
            ["alertmanager", "url"], required=True
        )

        # Matrix bot accounts setup
        self.accounts: List[AccountConfig] = []
        accounts_dict: list = self._get_cfg(["matrix", "accounts"], required=True)
        for i, account_dict in enumerate(accounts_dict):
            try:
                account = AccountConfig(account_dict)
            except KeyError as e:
                # AccountConfig reads required keys with account["key"], so a
                # KeyError names exactly the missing option.
                key_name = e.args[0]
                raise RequiredConfigKeyError(
                    f"Config option matrix.accounts.{i}.{key_name} is required"
                ) from e
            self.accounts.append(account)
        self.user_ids = {account.id for account in self.accounts}
        self.device_name: str = self._get_cfg(
            ["matrix", "device_name"], default="matrix-alertbot"
        )
        self.allowed_rooms: list = self._get_cfg(
            ["matrix", "allowed_rooms"], required=True
        )
        self.allowed_reactions = set(
            self._get_cfg(["matrix", "allowed_reactions"], default=DEFAULT_REACTIONS)
        )
        self.insult_reactions = set(
            self._get_cfg(["matrix", "insult_reactions"], default=INSULT_REACTIONS)
        )

        # Webhook server setup: either a unix socket, or an address/port pair.
        self.address: Optional[str] = self._get_cfg(
            ["webhook", "address"], required=False
        )
        self.port: Optional[int] = self._get_cfg(["webhook", "port"], required=False)
        self.socket: Optional[str] = self._get_cfg(
            ["webhook", "socket"], required=False
        )
        if (
            not (self.address or self.port or self.socket)
            or (self.address and not self.port)
            or (not self.address and self.port)
        ):
            raise RequiredConfigKeyError(
                "Must supply either webhook.socket or both webhook.address and webhook.port"
            )
        elif self.socket and self.address and self.port:
            raise InvalidConfigError(
                "Supplied both webhook.socket and both webhook.address"
            )

        # Direct-message setup: map alert user_ids to Matrix IDs (the BiDict
        # inverse allows looking up alert users from a Matrix ID as well).
        self.dm_users: BiDict[str, str] = BiDict()
        for user in self._get_cfg(["dm", "users"], default=[]):
            for user_id in user["user_id"]:
                self.dm_users[user_id] = user["matrix_id"]

        self.dm_room_title: Optional[str] = self._get_cfg(
            ["dm", "room_title"], required=False
        )
        # Label filters are regex patterns, compiled once here.
        filter_labels: Dict[str, str] = self._get_cfg(
            ["dm", "filter_labels"], default={}, required=False
        )
        self.dm_filter_labels: Dict[str, re.Pattern[str]] = {}
        for label_name, pattern in filter_labels.items():
            self.dm_filter_labels[label_name] = re.compile(pattern)

        self.dm_select_label: Optional[str] = self._get_cfg(
            ["dm", "select_label"], required=False
        )

    def _get_cfg(
        self,
        path: List[str],
        default: Optional[Any] = None,
        required: bool = True,
    ) -> Any:
        """Get a config option from a path and option name, specifying whether it is
        required.

        Args:
            path: Keys to follow from the top of the config downwards.
            default: Value to return when the option is missing.
            required: Whether a missing option (with no default) is an error.

        Raises:
            RequiredConfigKeyError: If required is True and the object is not found (and there is
                no default value provided), a ConfigError will be raised.
        """
        # Sift through the config until we reach our option
        config = self.config_dict
        for name in path:
            # Stop descending as soon as a level is missing: previously a
            # missing intermediate section raised AttributeError on
            # None.get(...) instead of reporting the option properly.
            if config is None:
                break
            config = config.get(name)

        # If at any point we don't get our expected option...
        if config is None:
            # Raise an error if it was required
            if required and default is None:
                raise RequiredConfigKeyError(
                    f"Config option {'.'.join(path)} is required"
                )

            # or return the default value
            return default

        # We found the option. Return it.
        return config
|