diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 6cda68b..70a2923 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -20,6 +20,7 @@ install:
   stage: install
   script:
     - make venv
+    - make develop
   cache:
     <<: *global_cache
     policy: push
diff --git a/Makefile b/Makefile
index 8e70848..13ba396 100644
--- a/Makefile
+++ b/Makefile
@@ -9,6 +9,7 @@ NC=\033[0m # No Color
 venv: ## Create the venv
 	python3 -m venv venv
+develop: venv ## Install the dev dependencies
 	venv/bin/pip install -e ".[dev,docs]"
 docs: cog ## Build the docs
 	venv/bin/sphinx-build docs public
@@ -19,11 +20,11 @@ cog: ## Run cog, to integrate the CLI options to the docs.
 	venv/bin/cog -r docs/*.md
 tests: venv ## Run the tests
 	venv/bin/pytest
-djlint: venv
+djlint: venv ## Format the templates
 	venv/bin/djlint --ignore=H030,H031 --lint argos/server/templates/*html
-pylint: venv
+pylint: venv ## Runs pylint on the code
 	venv/bin/pylint argos
-
+lint: djlint pylint
 help:
 	@python3 -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST)
diff --git a/alembic.ini b/alembic.ini
new file mode 100644
index 0000000..afb61ac
--- /dev/null
+++ b/alembic.ini
@@ -0,0 +1,41 @@
+[alembic]
+script_location = alembic
+prepend_sys_path = .
+
+version_path_separator = os
+sqlalchemy.url = sqlite:////tmp/argos.db
+
+# Logging configuration
+[loggers]
+keys = root,sqlalchemy,alembic
+
+[handlers]
+keys = console
+
+[formatters]
+keys = generic
+
+[logger_root]
+level = WARN
+handlers = console
+qualname =
+
+[logger_sqlalchemy]
+level = WARN
+handlers =
+qualname = sqlalchemy.engine
+
+[logger_alembic]
+level = INFO
+handlers =
+qualname = alembic
+
+[handler_console]
+class = StreamHandler
+args = (sys.stderr,)
+level = NOTSET
+formatter = generic
+
+[formatter_generic]
+format = %(levelname)-5.5s [%(name)s] %(message)s
+datefmt = %H:%M:%S
diff --git a/alembic/env.py b/alembic/env.py
new file mode 100644
index 0000000..347ff3f
--- /dev/null
+++ b/alembic/env.py
@@ -0,0 +1,62 @@
+from logging.config import fileConfig
+
+from alembic import context
+from argos.server.models import Base
+from sqlalchemy import engine_from_config, pool
+
+config = context.config
+
+if config.config_file_name is not None:
+    fileConfig(config.config_file_name)
+
+target_metadata = Base.metadata
+
+
+def run_migrations_offline() -> None:
+    """Run migrations in 'offline' mode.
+
+    This configures the context with just a URL
+    and not an Engine, though an Engine is acceptable
+    here as well. By skipping the Engine creation
+    we don't even need a DBAPI to be available.
+
+    Calls to context.execute() here emit the given string to the
+    script output.
+
+    """
+    url = config.get_main_option("sqlalchemy.url")
+    context.configure(
+        url=url,
+        target_metadata=target_metadata,
+        literal_binds=True,
+        dialect_opts={"paramstyle": "named"},
+    )
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+def run_migrations_online() -> None:
+    """Run migrations in 'online' mode.
+
+    In this scenario we need to create an Engine
+    and associate a connection with the context.
+
+    """
+    connectable = engine_from_config(
+        config.get_section(config.config_ini_section, {}),
+        prefix="sqlalchemy.",
+        poolclass=pool.NullPool,
+    )
+
+    with connectable.connect() as connection:
+        context.configure(connection=connection, target_metadata=target_metadata)
+
+        with context.begin_transaction():
+            context.run_migrations()
+
+
+if context.is_offline_mode():
+    run_migrations_offline()
+else:
+    run_migrations_online()
diff --git a/alembic/script.py.mako b/alembic/script.py.mako
new file mode 100644
index 0000000..fbc4b07
--- /dev/null
+++ b/alembic/script.py.mako
@@ -0,0 +1,26 @@
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision: str = ${repr(up_revision)}
+down_revision: Union[str, None] = ${repr(down_revision)}
+branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
+depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
+
+
+def upgrade() -> None:
+    ${upgrades if upgrades else "pass"}
+
+
+def downgrade() -> None:
+    ${downgrades if downgrades else "pass"}
diff --git a/alembic/versions/7d480e6f1112_initial_migrations.py b/alembic/versions/7d480e6f1112_initial_migrations.py
new file mode 100644
index 0000000..87bb415
--- /dev/null
+++ b/alembic/versions/7d480e6f1112_initial_migrations.py
@@ -0,0 +1,63 @@
+"""Initial migrations
+
+Revision ID: 7d480e6f1112
+Revises:
+Create Date: 2023-12-16 23:33:40.059077
+
+"""
+from typing import Sequence, Union
+
+import sqlalchemy as sa
+
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision: str = "7d480e6f1112"
+down_revision: Union[str, None] = None
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    op.create_table(
+        "tasks",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("url", sa.String(), nullable=False),
+        sa.Column("domain", sa.String(), nullable=False),
+        sa.Column("check", sa.String(), nullable=False),
+        sa.Column("expected", sa.String(), nullable=False),
+        sa.Column("frequency", sa.Integer(), nullable=False),
+        sa.Column("selected_by", sa.String(), nullable=True),
+        sa.Column("selected_at", sa.DateTime(), nullable=True),
+        sa.Column("completed_at", sa.DateTime(), nullable=True),
+        sa.Column("next_run", sa.DateTime(), nullable=True),
+        sa.PrimaryKeyConstraint("id"),
+    )
+    op.create_table(
+        "results",
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("task_id", sa.Integer(), nullable=False),
+        sa.Column("agent_id", sa.String(), nullable=True),
+        sa.Column("submitted_at", sa.DateTime(), nullable=False),
+        sa.Column(
+            "status",
+            sa.Enum("success", "failure", "error", "on-check", name="status"),
+            nullable=False,
+        ),
+        sa.Column(
+            "severity",
+            sa.Enum("ok", "warning", "critical", name="severity"),
+            nullable=False,
+        ),
+        sa.Column("context", sa.JSON(), nullable=False),
+        sa.ForeignKeyConstraint(
+            ["task_id"],
+            ["tasks.id"],
+        ),
+        sa.PrimaryKeyConstraint("id"),
+    )
+
+
+def downgrade() -> None:
+    op.drop_table("results")
+    op.drop_table("tasks")
diff --git a/argos/commands.py b/argos/commands.py
index 1e94071..63b1bd9 100644
--- a/argos/commands.py
+++ b/argos/commands.py
@@ -1,5 +1,6 @@
 import asyncio
 import os
+from functools import wraps
 
 import click
 import uvicorn
@@ -8,16 +9,22 @@ from argos import logging
 from argos.agent import ArgosAgent
 
 
-def validate_max_lock_seconds(ctx, param, value):
-    if value <= 60:
-        raise click.BadParameter("Should be strictly higher than 60")
-    return value
+async def get_db():
+    from argos.server.main import connect_to_db, get_application, setup_database
+
+    app = get_application()
+    setup_database(app)
+    return await connect_to_db(app)
 
 
-def validate_max_results(ctx, param, value):
-    if value <= 0:
-        raise click.BadParameter("Should be a positive integer")
-    return value
+def coroutine(f):
+    """Decorator to enable async functions in click"""
+
+    @wraps(f)
+    def wrapper(*args, **kwargs):
+        return asyncio.run(f(*args, **kwargs))
+
+    return wrapper
 
 
 @click.group()
@@ -79,8 +86,25 @@ def start(host, port, config, reload):
     uvicorn.run("argos.server:app", host=host, port=port, reload=reload)
 
 
+def validate_max_lock_seconds(ctx, param, value):
+    if value <= 60:
+        raise click.BadParameter("Should be strictly higher than 60")
+    return value
+
+
+def validate_max_results(ctx, param, value):
+    if value <= 0:
+        raise click.BadParameter("Should be a positive integer")
+    return value
+
+
 @server.command()
-@click.option("--max-results", default=100, help="Number of results per task to keep")
+@click.option(
+    "--max-results",
+    default=100,
+    help="Number of results per task to keep",
+    callback=validate_max_results,
+)
 @click.option(
     "--max-lock-seconds",
     default=100,
@@ -88,7 +112,8 @@ def start(host, port, config, reload):
     "(the checks have a timeout value of 60 seconds)",
     callback=validate_max_lock_seconds,
 )
-def cleandb(max_results, max_lock_seconds):
+@coroutine
+async def cleandb(max_results, max_lock_seconds):
     """Clean the database (to run routinely)
 
     \b
@@ -97,19 +122,13 @@ def start(host, port, config, reload):
     """
     # The imports are made here otherwise the agent will need server configuration files.
     from argos.server import queries
-    from argos.server.main import connect_to_db, get_application, setup_database
 
-    async def clean_old_results():
-        app = get_application()
-        setup_database(app)
-        db = await connect_to_db(app)
-        removed = await queries.remove_old_results(db, max_results)
-        updated = await queries.release_old_locks(db, max_lock_seconds)
+    db = await get_db()
+    removed = await queries.remove_old_results(db, max_results)
+    updated = await queries.release_old_locks(db, max_lock_seconds)
 
-        click.echo(f"{removed} results removed")
-        click.echo(f"{updated} locks released")
-
-    asyncio.run(clean_old_results())
+    click.echo(f"{removed} results removed")
+    click.echo(f"{updated} locks released")
 
 
 if __name__ == "__main__":
diff --git a/config.yaml b/config.yaml
index 634628b..5c71b89 100644
--- a/config.yaml
+++ b/config.yaml
@@ -1,25 +1,64 @@
 general:
-  frequency: "5m" # Run checks every minute.
+  frequency: "1m" # Run checks every minute.
+  # Which way do you want to be warned when a check goes to that severity?
   alerts:
-    error:
+    ok:
       - local
     warning:
       - local
-    alert:
+    critical:
       - local
+    unknown:
+      - local
+#  mail:
+#    mailfrom: no-reply@example.org
+#    host: 127.0.0.1
+#    port: 25
+#    ssl: False
+#    starttls: False
+#    auth:
+#      login: foo
+#      password: bar
+#    addresses:
+#      - foo@admin.example.org
+#      - bar@admin.example.org
+#  gotify:
+#    - url: https://example.org
+#      tokens:
+#        - foo
+#        - bar
 
 service:
   secrets:
-    - Shorts-Tribunal-Plentiful-Penknife-Lazily-Move0
+    - "1234" # Secrets can be generated using `openssl rand -base64 32`.
   ssl:
     thresholds:
-      - "15d": critical
-      - "25d": warning
+      - "1d": critical
+      - "5d": warning
 
 # It's also possible to define the checks in another file
 # with the include syntax:
 #
 # websites: !include websites.yaml
 #
-websites: !include websites.yaml
+websites:
+  - domain: "https://mypads.example.org"
+    paths:
+      - path: "/mypads/"
+        checks:
+          - status-is: 200
+          - body-contains: ''
+          - ssl-certificate-expiration: "on-check"
+      - path: "/admin/"
+        checks:
+          - status-is: 401
+  - domain: "https://munin.example.org"
+    paths:
+      - path: "/"
+        checks:
+          - status-is: 301
+      - path: "/munin/"
+        checks:
+          - status-is: 401
diff --git a/docs/developer/migrations.md b/docs/developer/migrations.md
new file mode 100644
index 0000000..12746cd
--- /dev/null
+++ b/docs/developer/migrations.md
@@ -0,0 +1,11 @@
+# Adding a database migration
+
+We are using [Alembic](https://alembic.sqlalchemy.org) to handle the database
+migrations. Here is how to proceed in order to add a new migration:
+
+First, do your changes in the code, change the model, add new tables, etc. Once
+you're done, you can create a new migration.
+
+```bash
+venv/bin/alembic revision --autogenerate -m "migration reason"
+```
diff --git a/pyproject.toml b/pyproject.toml
index 67da05e..a9375e7 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -17,43 +17,44 @@ classifiers = [
 ]
 
 dependencies = [
+    "alembic>=1.13.0,<1.14",
     "click>=8.1,<9",
     "fastapi>=0.103,<0.104",
     "httpx>=0.25,<1",
+    "Jinja2>=3.0,<4",
     "pydantic[email]>=2.4,<3",
+    "pydantic-settings>=2.0,<3",
     "pyyaml>=6.0,<7",
     "pyyaml-include>=1.3,<2",
     "sqlalchemy[asyncio]>=2.0,<3",
    "sqlalchemy-utils>=0.41,<1",
-    "uvicorn>=0.23,<1",
-    "Jinja2>=3.0,<4",
-    "pydantic-settings>=2.0,<3",
     "tenacity>=8.2,<9",
+    "uvicorn>=0.23,<1",
 ]
 
 [project.optional-dependencies]
 dev = [
     "black==23.3.0",
-    "isort==5.11.5",
-    "pytest>=6.2.5",
-    "pytest-asyncio>=0.21,<1",
-    "respx>=0.20,<1",
-    "ipython>=8.16,<9",
-    "ipdb>=0.13,<0.14",
-    "sphinx-autobuild",
-    "ruff==0.1.5,<1",
     "djlint>=1.34.0",
+    "ipdb>=0.13,<0.14",
+    "ipython>=8.16,<9",
+    "isort==5.11.5",
     "pylint>=3.0.2",
+    "pytest-asyncio>=0.21,<1",
+    "pytest>=6.2.5",
+    "respx>=0.20,<1",
+    "ruff==0.1.5,<1",
+    "sphinx-autobuild",
 ]
 postgres = [
     "psycopg2-binary>=2.9,<3",
 ]
 docs = [
-    "sphinx>=7,<8",
+    "cogapp",
     "myst-parser>=2.0,<3",
     "shibuya",
     "sphinx-design",
-    "cogapp",
+    "sphinx>=7,<8",
     "sphinxcontrib-mermaid>=0.9,<1",
 ]
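A note on the Makefile split above: `make venv` now only creates the virtualenv, dependency installation moves to the new `develop` target (which the CI install stage also calls), and the new `lint` target chains `djlint` and `pylint`. A local setup could therefore look like this sketch, assuming a POSIX shell at the repository root:

```bash
# Create the venv and install the dev/docs extras (the CI "install" stage now does the same).
make develop

# Run the template linter and pylint through the new aggregate target, then the tests.
make lint
make tests
```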
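The new `docs/developer/migrations.md` page only covers generating a revision; applying it is not shown there. A minimal sketch of the standard Alembic workflow, assuming the `alembic.ini` added at the repository root (its `sqlalchemy.url` may need to point at the real database first):

```bash
# Generate a revision from the model changes, then apply every pending migration.
venv/bin/alembic revision --autogenerate -m "migration reason"
venv/bin/alembic upgrade head

# Inspect the revision history, or roll back the most recent revision if needed.
venv/bin/alembic history
venv/bin/alembic downgrade -1
```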
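Finally, the reworked `cleandb` command is now an `async` function driven by the small `coroutine` decorator (which runs it through `asyncio.run()`), and both options gain validation callbacks. A usage sketch, assuming the package exposes its Click group as an `argos` console script inside the venv:

```bash
# Keep at most 50 results per task and release locks older than 120 seconds.
# --max-results must be a positive integer and --max-lock-seconds strictly
# greater than 60, otherwise the new callbacks raise a BadParameter error.
venv/bin/argos server cleandb --max-results 50 --max-lock-seconds 120
```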