diff --git a/.gitignore b/.gitignore
index fbc86f6..72178af 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,10 +11,14 @@ env3/
 # Bot local files
 *.db
 store/
+cache/
 
 # Config file
 config.yaml
 
+# Alertmanager config file
+alertmanager.yml
+
 # Python
 __pycache__/
 *.egg-info/
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..435687d
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,2 @@
+include requirements.txt
+recursive-include matrix_alertbot/resources *
diff --git a/sample.config.yaml b/config.sample.yaml
similarity index 51%
rename from sample.config.yaml
rename to config.sample.yaml
index 8401ff3..8f1ae3c 100644
--- a/sample.config.yaml
+++ b/config.sample.yaml
@@ -3,34 +3,56 @@
 # Default values are shown
 
 # The string to prefix messages with to talk to the bot in group chats
-command_prefix: "!c"
+command_prefix: "!alert"
 
 # Options for connecting to the bot's Matrix account
 matrix:
   # The Matrix User ID of the bot account
-  user_id: "@bot:example.com"
+  user_id: "@bot:matrix.example.com"
   # Matrix account password (optional if access token used)
-  user_password: ""
+  user_password: "password"
   # Matrix account access token (optional if password used)
   #user_token: ""
   # The URL of the homeserver to connect to
-  homeserver_url: https://example.com
+  url: https://matrix.example.com
   # The device ID that is **non pre-existing** device
   # If this device ID already exists, messages will be dropped silently in encrypted rooms
   device_id: ABCDEFGHIJ
   # What to name the logged in device
   device_name: matrix-alertbot
+  # List of rooms where the bot can interact
+  allowed_rooms:
+    - "!abcdefgh:matrix.example.com"
+  # List of allowed reactions to create silences.
+  # Default is listed here.
+  allowed_reactions: [🤫, 😶, 🤐, 🙊, 🔇, 🔕]
+
+webhook:
+  # Address and port on which the bot should listen
+  address: 0.0.0.0
+  port: 8080
+  # Path to the socket on which the bot should listen.
+  # This is mutually exclusive with the webhook.address option.
+  socket: /path/to/matrix-alertbot.sock
+
+alertmanager:
+  # URL of the Alertmanager server
+  url: http://localhost:9093
+
+cache:
+  # The path to a directory for caching alerts and silences
+  path: "./cache"
 
 storage:
-  # The database connection string
-  # For SQLite3, this would look like:
-  #     database: "sqlite://bot.db"
-  # For Postgres, this would look like:
-  #     database: "postgres://username:password@localhost/dbname?sslmode=disable"
-  database: "sqlite://bot.db"
   # The path to a directory for internal bot storage
   # containing encryption keys, sync tokens, etc.
-  store_path: "./store"
+  path: "./store"
+
+template:
+  # Path to a directory that contains templates for rendering alerts.
+  # The directory must contain the files "alert.html.j2" and "alert.txt.j2", written in the Jinja2 template format.
+  # Default is to use the templates provided by the matrix_alertbot package, available in "matrix_alertbot/resources/templates".
+  path: "data/templates"
 
 # Logging setup
 logging:
@@ -42,7 +64,7 @@ logging:
     # Whether logging to a file is enabled
     enabled: false
     # The path to the file to log to. May be relative or absolute
-    filepath: bot.log
+    filepath: matrix-alertbot.log
   # Configure logging to the console output
   console_logging:
     # Whether logging to the console is enabled
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 916bc80..b08adb2 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -41,37 +41,41 @@ RUN apk add --no-cache \
     yaml-dev \
     python3-dev
 
+ENV VIRTUAL_ENV="/opt/matrix_alertbot"
+ENV PATH="$VIRTUAL_ENV/bin:$PATH"
+RUN python -m venv $VIRTUAL_ENV
+
+WORKDIR "${VIRTUAL_ENV}"
 # Build libolm
 #
 # Also build the libolm python bindings and place them at /python-libs
 # We will later copy contents from both of these folders to the runtime
 # container
 COPY docker/build_and_install_libolm.sh /scripts/
-RUN /scripts/build_and_install_libolm.sh ${LIBOLM_VERSION} /python-libs
+RUN /scripts/build_and_install_libolm.sh "${LIBOLM_VERSION}" "${VIRTUAL_ENV}"
 
 # Install Postgres dependencies
 RUN apk add --no-cache \
-    musl-dev \
-    libpq \
-    postgresql-dev
+    musl-dev
 
 # Install python runtime modules. We do this before copying the source code
 # such that these dependencies can be cached
 # This speeds up subsequent image builds when the source code is changed
-RUN mkdir -p /src/matrix_alertbot
-COPY matrix_alertbot/__init__.py /src/matrix_alertbot/
-COPY README.md matrix-alertbot /src/
+RUN mkdir -p ./matrix_alertbot/
+COPY matrix_alertbot/__init__.py ./matrix_alertbot/
+COPY README.md matrix-alertbot ./
 
 # Build the dependencies
-COPY setup.py /src/setup.py
-RUN pip install --prefix="/python-libs" --no-warn-script-location "/src/.[postgres]"
+COPY setup.py ./setup.py
+RUN pip install --no-warn-script-location ".[e2e]"
 
 # Now copy the source code
-COPY *.py *.md /src/
-COPY matrix_alertbot/*.py /src/matrix_alertbot/
+COPY *.py *.md *.in ./
+COPY matrix_alertbot/*.py ./matrix_alertbot/
+COPY matrix_alertbot/resources ./matrix_alertbot/resources
 
 # And build the final module
-RUN pip install --prefix="/python-libs" --no-warn-script-location "/src/.[postgres]"
+RUN pip install --no-warn-script-location ".[e2e]"
 
 ##
 ## Creating the runtime container
@@ -81,21 +85,22 @@ RUN pip install --prefix="/python-libs" --no-warn-script-location "/src/.[postgr
 # python dependencies that we built above to this container
 FROM docker.io/python:${PYTHON_VERSION}-alpine
 
+ENV VIRTUAL_ENV="/opt/matrix_alertbot"
+ENV PATH="$VIRTUAL_ENV/bin:$PATH"
+
 # Copy python dependencies from the "builder" container
-COPY --from=builder /python-libs /usr/local
+COPY --from=builder /opt/matrix_alertbot /opt/matrix_alertbot
 
 # Copy libolm from the "builder" container
 COPY --from=builder /usr/local/lib/libolm* /usr/local/lib/
 
 # Install any native runtime dependencies
 RUN apk add --no-cache \
-    libstdc++ \
-    libpq \
-    postgresql-dev
+    libstdc++
 
 # Specify a volume that holds the config file, SQLite3 database,
 # and the matrix-nio store
 VOLUME ["/data"]
 
 # Start the bot
-ENTRYPOINT ["matrix-alertbot", "/data/config.yaml"]
+CMD ["matrix-alertbot", "/data/config.yaml"]
diff --git a/docker/Dockerfile.dev b/docker/Dockerfile.dev
deleted file mode 100644
index ca11d32..0000000
--- a/docker/Dockerfile.dev
+++ /dev/null
@@ -1,73 +0,0 @@
-# This dockerfile is crafted specifically for development purposes.
-# Please use `Dockerfile` instead if you wish to deploy for production.
-#
-# This file differs as it does not use a builder container, nor does it
-# reinstall the project's python package after copying the source code,
-# saving significant time during rebuilds.
-#
-# To build the image, run `docker build` command from the root of the
-# repository:
-#
-#    docker build -f docker/Dockerfile .
-#
-# There is an optional PYTHON_VERSION build argument which sets the
-# version of python to build against. For example:
-#
-#    docker build -f docker/Dockerfile --build-arg PYTHON_VERSION=3.10 .
-#
-# An optional LIBOLM_VERSION build argument which sets the
-# version of libolm to build against. For example:
-#
-#    docker build -f docker/Dockerfile --build-arg LIBOLM_VERSION=3.2.10 .
-#
-
-ARG PYTHON_VERSION=3.10
-FROM docker.io/python:${PYTHON_VERSION}-alpine
-
-##
-## Build libolm for matrix-nio e2e support
-##
-
-# Install libolm build dependencies
-ARG LIBOLM_VERSION=3.2.10
-RUN apk add --no-cache \
-    make \
-    cmake \
-    gcc \
-    g++ \
-    git \
-    libffi-dev \
-    yaml-dev \
-    python3-dev
-
-# Build libolm
-COPY docker/build_and_install_libolm.sh /scripts/
-RUN /scripts/build_and_install_libolm.sh ${LIBOLM_VERSION}
-
-# Install native runtime dependencies
-RUN apk add --no-cache \
-    musl-dev \
-    libpq \
-    postgresql-dev \
-    libstdc++
-
-# Install python runtime modules. We do this before copying the source code
-# such that these dependencies can be cached
-RUN mkdir -p /src/matrix_alertbot
-COPY matrix_alertbot/__init__.py /src/matrix_alertbot/
-COPY README.md matrix-alertbot /src/
-COPY setup.py /src/setup.py
-RUN pip install -e "/src/.[postgres]"
-
-# Now copy the source code
-COPY matrix_alertbot/*.py /src/matrix_alertbot/
-COPY *.py /src/
-
-# Specify a volume that holds the config file, SQLite3 database,
-# and the matrix-nio store
-VOLUME ["/data"]
-
-# Start the app
-ENTRYPOINT ["matrix-alertbot", "/data/config.yaml"]
-
-EXPOSE 8080
diff --git a/docker/alertmanager/alertmanager.sample.yml b/docker/alertmanager/alertmanager.sample.yml
new file mode 100644
index 0000000..a058c5a
--- /dev/null
+++ b/docker/alertmanager/alertmanager.sample.yml
@@ -0,0 +1,11 @@
+route:
+  group_by: ["alertname"]
+  group_wait: 30s
+  group_interval: 5m
+  repeat_interval: 1h
+  receiver: "default-receiver"
+receivers:
+  - name: "default-receiver"
+    webhook_configs:
+      - url: http://matrix-alertbot:8080/alerts/
+        send_resolved: true
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
index 27e0095..53687db 100644
--- a/docker/docker-compose.yml
+++ b/docker/docker-compose.yml
@@ -4,13 +4,42 @@ volumes:
   # Set up with `docker volume create ...`. See docker/README.md for more info.
   matrix-alertbot:
     external: true
-  pg_data_volume:
+
+networks:
+  matrix-alertbot:
+    name: matrix-alertbot
 
 services:
+  prometheus:
+    image: prom/prometheus
+    restart: always
+    networks:
+      - matrix-alertbot
+    volumes:
+      - ./prometheus/prometheus.yml:/etc/prometheus/prometheus.yml
+      - ./prometheus/rules.d:/etc/prometheus/rules.d
+
+  alertmanager:
+    image: prom/alertmanager
+    depends_on:
+      - prometheus
+    restart: always
+    networks:
+      - matrix-alertbot
+    volumes:
+      - ./alertmanager/alertmanager.yml:/etc/alertmanager/alertmanager.yml
+
   # Runs from the latest release
   matrix-alertbot:
     image: neutrinet/matrix-alertbot
+    build:
+      context: ..
+      dockerfile: docker/Dockerfile
+    depends_on:
+      - alertmanager
     restart: always
+    networks:
+      - matrix-alertbot
     volumes:
       - matrix-alertbot:/data
       # Used for allowing connections to homeservers hosted on the host machine
@@ -20,45 +49,3 @@ services:
     extra_hosts:
       - "localhost:${HOST_IP_ADDRESS}"
 
-  # Builds and runs an optimized container from local code
-  local-checkout:
-    build:
-      context: ..
-      dockerfile: docker/Dockerfile
-      # Build arguments may be specified here
-      # args:
-      #   PYTHON_VERSION: 3.8
-    volumes:
-      - matrix-alertbot:/data
-      # Used for allowing connections to homeservers hosted on the host machine
-      # (while docker host networking mode is still broken on Linux).
-      #
-      # Defaults to 127.0.0.1 and is set in docker/.env
-    extra_hosts:
-      - "localhost:${HOST_IP_ADDRESS}"
-
-  # Builds and runs a development container from local code
-  local-checkout-dev:
-    build:
-      context: ..
-      dockerfile: docker/Dockerfile.dev
-      # Build arguments may be specified here
-      # args:
-      #   PYTHON_VERSION: 3.8
-    volumes:
-      - matrix-alertbot:/data
-      # Used for allowing connections to homeservers hosted on the host machine
-      # (while docker host networking mode is still broken on Linux).
-      #
-      # Defaults to 127.0.0.1 and is set in docker/.env
-    extra_hosts:
-      - "localhost:${HOST_IP_ADDRESS}"
-
-  # Starts up a postgres database
-  postgres:
-    image: postgres
-    restart: always
-    volumes:
-      - pg_data_volume:/var/lib/postgresql/data
-    environment:
-      POSTGRES_PASSWORD: somefancypassword
diff --git a/docker/prometheus/prometheus.yml b/docker/prometheus/prometheus.yml
new file mode 100644
index 0000000..b47025a
--- /dev/null
+++ b/docker/prometheus/prometheus.yml
@@ -0,0 +1,27 @@
+global:
+  scrape_interval: 15s # Set the scrape interval to every 15 seconds. Default is every 1 minute.
+  evaluation_interval: 15s # Evaluate rules every 15 seconds. The default is every 1 minute.
+  # scrape_timeout is set to the global default (10s).
+
+# Alertmanager configuration
+alerting:
+  alertmanagers:
+    - static_configs:
+        - targets:
+            - alertmanager:9093
+
+# Load rules once and periodically evaluate them according to the global 'evaluation_interval'.
+rule_files:
+  - rules.d/*.yml
+
+# A scrape configuration containing exactly one endpoint to scrape:
+# Here it's Prometheus itself.
+scrape_configs:
+  # The job name is added as a label `job=<job_name>` to any timeseries scraped from this config.
+  - job_name: "prometheus"
+
+    # metrics_path defaults to '/metrics'
+    # scheme defaults to 'http'.
+
+    static_configs:
+      - targets: ["localhost:9090"]
diff --git a/docker/prometheus/rules.d/health.yml b/docker/prometheus/rules.d/health.yml
new file mode 100644
index 0000000..e321057
--- /dev/null
+++ b/docker/prometheus/rules.d/health.yml
@@ -0,0 +1,11 @@
+groups:
+  - name: Health
+    rules:
+      - alert: Instance Up
+        for: 30s
+        expr: up == 1
+        labels:
+          severity: critical
+        annotations:
+          description: 'Instance {{ $labels.instance }} is up'
+          summary: 'Instance is up'
diff --git a/matrix_alertbot/config.py b/matrix_alertbot/config.py
index 46a0d57..52faad6 100644
--- a/matrix_alertbot/config.py
+++ b/matrix_alertbot/config.py
@@ -4,7 +4,7 @@ import re
 import sys
 from typing import Any, List, Optional
 
-import pytimeparse
+import pytimeparse2
 import yaml
 
 from matrix_alertbot.errors import (
@@ -84,7 +84,7 @@ class Config:
         # Cache setup
         self.cache_dir: str = self._get_cfg(["cache", "path"], required=True)
         expire_time: str = self._get_cfg(["cache", "expire_time"], default="1w")
-        self.cache_expire_time = pytimeparse.parse(expire_time)
+        self.cache_expire_time = pytimeparse2.parse(expire_time)
 
         # Alertmanager client setup
         self.alertmanager_url: str = self._get_cfg(
diff --git a/matrix_alertbot/main.py b/matrix_alertbot/main.py
index 1b246df..6537d73 100644
--- a/matrix_alertbot/main.py
+++ b/matrix_alertbot/main.py
@@ -28,12 +28,21 @@ logger = logging.getLogger(__name__)
 
 def create_matrix_client(config: Config) -> AsyncClient:
     # Configuration options for the AsyncClient
-    matrix_client_config = AsyncClientConfig(
-        max_limit_exceeded=0,
-        max_timeouts=0,
-        store_sync_tokens=True,
-        encryption_enabled=True,
-    )
+    try:
+        matrix_client_config = AsyncClientConfig(
+            max_limit_exceeded=0,
+            max_timeouts=0,
+            store_sync_tokens=True,
+            encryption_enabled=True,
+        )
+    except ImportWarning as e:
+        logger.warning(e)
+        matrix_client_config = AsyncClientConfig(
+            max_limit_exceeded=0,
+            max_timeouts=0,
+            store_sync_tokens=True,
+            encryption_enabled=False,
+        )
 
     # Initialize the matrix client
     matrix_client = AsyncClient(
diff --git a/setup.py b/setup.py
index 2fdc3b5..ad5ca7f 100644
--- a/setup.py
+++ b/setup.py
@@ -2,9 +2,10 @@ import os
 from typing import Tuple
 
-import pkg_resources
 from setuptools import find_packages, setup
 
+from matrix_alertbot import __version__
+
 
 
 def read_file(path_segments: Tuple) -> str:
     """Read a file from the package. Takes a list of strings to join to
@@ -14,7 +15,7 @@ def read_file(path_segments: Tuple) -> str:
         return f.read()
 
 
-version = pkg_resources.require("matrix_alertbot")[0].version
+version = __version__
 
 long_description = read_file(("README.md",))
 
@@ -24,6 +25,7 @@ setup(
     url="https://github.com/anoadragon453/nio-template",
     description="A matrix bot to do amazing things!",
     packages=find_packages(exclude=["tests", "tests.*"]),
+    include_package_data=True,
     install_requires=[
         "aiohttp>=3.8.1",
         "aiohttp-prometheus-exporter>=0.2.4",
@@ -33,7 +35,7 @@ setup(
         "matrix-nio>=0.19.0",
         "Markdown>=3.3.7",
         "pytimeparse2>=1.4.0",
-        "PyYAML>=6.0",
+        "PyYAML>=5.4.1",
         "typing-extensions>=4.3.0",
     ],
     extras_require={
diff --git a/tests/test_alertmanager.py b/tests/test_alertmanager.py
index 96c4a0c..0e27f6a 100644
--- a/tests/test_alertmanager.py
+++ b/tests/test_alertmanager.py
@@ -43,7 +43,6 @@ class FakeCache:
 
     def set(self, key: str, value: str, expire: int) -> None:
         self.cache[key] = value, expire
-        print(self.cache)
 
 
 class AbstractFakeAlertmanagerServer: