Merge branch 'almet/migration' into 'main'

Add support for migrations

See merge request framasoft/framaspace/argos!32
This commit is contained in:
Luc Didry 2024-02-01 13:54:20 +00:00
commit 93b05e6d4f
10 changed files with 308 additions and 44 deletions

View file

@ -20,6 +20,7 @@ install:
stage: install
script:
- make venv
- make develop
cache:
<<: *global_cache
policy: push

View file

@ -9,6 +9,7 @@ NC=\033[0m # No Color
venv: ## Create the venv
python3 -m venv venv
develop: venv ## Install the dev dependencies
venv/bin/pip install -e ".[dev,docs]"
docs: cog ## Build the docs
venv/bin/sphinx-build docs public
@ -19,11 +20,11 @@ cog: ## Run cog, to integrate the CLI options to the docs.
venv/bin/cog -r docs/*.md
tests: venv ## Run the tests
venv/bin/pytest
djlint: venv
djlint: venv ## Format the templates
venv/bin/djlint --ignore=H030,H031 --lint argos/server/templates/*html
pylint: venv
pylint: venv ## Runs pylint on the code
venv/bin/pylint argos
lint: djlint pylint
help:
@python3 -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST)

41
alembic.ini Normal file
View file

@ -0,0 +1,41 @@
[alembic]
script_location = alembic
prepend_sys_path = .
version_path_separator = os
sqlalchemy.url = sqlite:////tmp/argos.db
# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARN
handlers = console
qualname =
[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

62
alembic/env.py Normal file
View file

@ -0,0 +1,62 @@
from logging.config import fileConfig
from alembic import context
from argos.server.models import Base
from sqlalchemy import engine_from_config, pool
# Alembic Config object: provides access to the values in alembic.ini.
config = context.config

# Configure Python logging from the ini file, when one was provided.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Metadata of the application's models, used by 'autogenerate' support.
target_metadata = Base.metadata
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Only a database URL is handed to the Alembic context — no Engine
    is built, so a DBAPI does not even have to be installed. Every
    context.execute() call is emitted as SQL text to the script
    output instead of being run against a database.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Builds an Engine from the ini-file section and associates a live
    database connection with the Alembic context.
    """
    engine = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with engine.connect() as connection:
        context.configure(
            connection=connection,
            target_metadata=target_metadata,
        )

        with context.begin_transaction():
            context.run_migrations()
# Entry point: Alembic selects offline or online mode depending on
# how the command was invoked (e.g. `alembic upgrade --sql` is offline).
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

26
alembic/script.py.mako Normal file
View file

@ -0,0 +1,26 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

View file

@ -0,0 +1,63 @@
"""Initial migrations
Revision ID: 7d480e6f1112
Revises:
Create Date: 2023-12-16 23:33:40.059077
"""
from typing import Sequence, Union
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision: str = "7d480e6f1112"
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the initial schema: the ``tasks`` and ``results`` tables."""
    op.create_table(
        "tasks",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("url", sa.String(), nullable=False),
        sa.Column("domain", sa.String(), nullable=False),
        sa.Column("check", sa.String(), nullable=False),
        sa.Column("expected", sa.String(), nullable=False),
        sa.Column("frequency", sa.Integer(), nullable=False),
        # Agent bookkeeping: which agent picked the task up, and when.
        sa.Column("selected_by", sa.String(), nullable=True),
        sa.Column("selected_at", sa.DateTime(), nullable=True),
        sa.Column("completed_at", sa.DateTime(), nullable=True),
        sa.Column("next_run", sa.DateTime(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "results",
        sa.Column("id", sa.Integer(), nullable=False),
        # Each result belongs to one task (FK below).
        sa.Column("task_id", sa.Integer(), nullable=False),
        sa.Column("agent_id", sa.String(), nullable=True),
        sa.Column("submitted_at", sa.DateTime(), nullable=False),
        sa.Column(
            "status",
            sa.Enum("success", "failure", "error", "on-check", name="status"),
            nullable=False,
        ),
        sa.Column(
            "severity",
            sa.Enum("ok", "warning", "critical", name="severity"),
            nullable=False,
        ),
        sa.Column("context", sa.JSON(), nullable=False),
        sa.ForeignKeyConstraint(
            ["task_id"],
            ["tasks.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
def downgrade() -> None:
    """Drop the schema; ``results`` first since it references ``tasks``."""
    op.drop_table("results")
    op.drop_table("tasks")

View file

@ -1,5 +1,6 @@
import asyncio
import os
from functools import wraps
import click
import uvicorn
@ -8,16 +9,22 @@ from argos import logging
from argos.agent import ArgosAgent
def validate_max_lock_seconds(ctx, param, value):
    """Click option callback: the lock duration must strictly exceed 60s."""
    if value > 60:
        return value
    raise click.BadParameter("Should be strictly higher than 60")
async def get_db():
    """Build the application, wire up its database and return a connection."""
    # Imported lazily so the agent does not need server configuration files.
    from argos.server.main import connect_to_db, get_application, setup_database

    application = get_application()
    setup_database(application)
    return await connect_to_db(application)
def validate_max_results(ctx, param, value):
    """Click option callback: the result count must be a positive integer."""
    if value > 0:
        return value
    raise click.BadParameter("Should be a positive integer")
def coroutine(f):
    """Decorator to enable async functions in click"""

    @wraps(f)
    def run_sync(*args, **kwargs):
        # Drive the coroutine to completion on a fresh event loop.
        return asyncio.run(f(*args, **kwargs))

    return run_sync
@click.group()
@ -79,8 +86,25 @@ def start(host, port, config, reload):
uvicorn.run("argos.server:app", host=host, port=port, reload=reload)
def validate_max_lock_seconds(ctx, param, value):
    """Click option callback: reject values not strictly above 60 seconds."""
    if value <= 60:
        raise click.BadParameter("Should be strictly higher than 60")
    return value
def validate_max_results(ctx, param, value):
    """Click option callback: reject zero or negative result counts."""
    if value <= 0:
        raise click.BadParameter("Should be a positive integer")
    return value
@server.command()
@click.option("--max-results", default=100, help="Number of results per task to keep")
@click.option(
"--max-results",
default=100,
help="Number of results per task to keep",
callback=validate_max_results,
)
@click.option(
"--max-lock-seconds",
default=100,
@ -88,7 +112,8 @@ def start(host, port, config, reload):
"(the checks have a timeout value of 60 seconds)",
callback=validate_max_lock_seconds,
)
def cleandb(max_results, max_lock_seconds):
@coroutine
async def cleandb(max_results, max_lock_seconds):
"""Clean the database (to run routinely)
\b
@ -97,19 +122,13 @@ def cleandb(max_results, max_lock_seconds):
"""
# The imports are made here otherwise the agent will need server configuration files.
from argos.server import queries
from argos.server.main import connect_to_db, get_application, setup_database
async def clean_old_results():
app = get_application()
setup_database(app)
db = await connect_to_db(app)
removed = await queries.remove_old_results(db, max_results)
updated = await queries.release_old_locks(db, max_lock_seconds)
db = await get_db()
removed = await queries.remove_old_results(db, max_results)
updated = await queries.release_old_locks(db, max_lock_seconds)
click.echo(f"{removed} results removed")
click.echo(f"{updated} locks released")
asyncio.run(clean_old_results())
click.echo(f"{removed} results removed")
click.echo(f"{updated} locks released")
if __name__ == "__main__":

View file

@ -1,25 +1,64 @@
general:
frequency: "5m" # Run checks every five minutes.
frequency: "1m" # Run checks every minute.
# Which way do you want to be warned when a check goes to that severity?
alerts:
error:
ok:
- local
warning:
- local
alert:
critical:
- local
unknown:
- local
# mail:
# mailfrom: no-reply@example.org
# host: 127.0.0.1
# port: 25
# ssl: False
# starttls: False
# auth:
# login: foo
# password: bar
# addresses:
# - foo@admin.example.org
# - bar@admin.example.org
# gotify:
# - url: https://example.org
# tokens:
# - foo
# - bar
service:
secrets:
- Shorts-Tribunal-Plentiful-Penknife-Lazily-Move0
- "1234"
# Secrets can be generated using `openssl rand -base64 32`.
ssl:
thresholds:
- "15d": critical
- "25d": warning
- "1d": critical
- "5d": warning
# It's also possible to define the checks in another file
# with the include syntax:
#
# websites: !include websites.yaml
#
websites: !include websites.yaml
websites:
- domain: "https://mypads.example.org"
paths:
- path: "/mypads/"
checks:
- status-is: 200
- body-contains: '<div id= "mypads"></div>'
- ssl-certificate-expiration: "on-check"
- path: "/admin/"
checks:
- status-is: 401
- domain: "https://munin.example.org"
paths:
- path: "/"
checks:
- status-is: 301
- path: "/munin/"
checks:
- status-is: 401

View file

@ -0,0 +1,11 @@
# Adding a database migration
We are using [Alembic](https://alembic.sqlalchemy.org) to handle the database
migrations. Here is how to proceed in order to add a new migration:
First, make your changes in the code: update the model, add new tables, etc.
Once you're done, you can create a new migration.
```bash
venv/bin/alembic revision --autogenerate -m "migration reason"
```

View file

@ -17,43 +17,44 @@ classifiers = [
]
dependencies = [
"alembic>=1.13.0,<1.14",
"click>=8.1,<9",
"fastapi>=0.103,<0.104",
"httpx>=0.25,<1",
"Jinja2>=3.0,<4",
"pydantic[email]>=2.4,<3",
"pydantic-settings>=2.0,<3",
"pyyaml>=6.0,<7",
"pyyaml-include>=1.3,<2",
"sqlalchemy[asyncio]>=2.0,<3",
"sqlalchemy-utils>=0.41,<1",
"uvicorn>=0.23,<1",
"Jinja2>=3.0,<4",
"pydantic-settings>=2.0,<3",
"tenacity>=8.2,<9",
"uvicorn>=0.23,<1",
]
[project.optional-dependencies]
dev = [
"black==23.3.0",
"isort==5.11.5",
"pytest>=6.2.5",
"pytest-asyncio>=0.21,<1",
"respx>=0.20,<1",
"ipython>=8.16,<9",
"ipdb>=0.13,<0.14",
"sphinx-autobuild",
"ruff==0.1.5,<1",
"djlint>=1.34.0",
"ipdb>=0.13,<0.14",
"ipython>=8.16,<9",
"isort==5.11.5",
"pylint>=3.0.2",
"pytest-asyncio>=0.21,<1",
"pytest>=6.2.5",
"respx>=0.20,<1",
"ruff==0.1.5,<1",
"sphinx-autobuild",
]
postgres = [
"psycopg2-binary>=2.9,<3",
]
docs = [
"sphinx>=7,<8",
"cogapp",
"myst-parser>=2.0,<3",
"shibuya",
"sphinx-design",
"cogapp",
"sphinx>=7,<8",
"sphinxcontrib-mermaid>=0.9,<1",
]