Compare commits


8 commits

15 changed files with 295 additions and 32 deletions

View file

@@ -2,9 +2,17 @@
## [Unreleased]
## 0.7.0
Date: 2025-01-14
- ✨ — IPv4/IPv6 choice for checks, plus an optional dual-stack check (#69)
- ⚡ — Pool identical check requests (#68)
- ✨ — Ability to delay notifications until after X failures (#71)
- 🐛 — Fix bug where changing the IP version did not remove stale tasks (#72)
- ✨ — Allow specifying form data and headers for checks (#70)
- 🚸 — Add a long expiration date to auto-refresh cookies
- 🗃️ — Use bigint type for the results id column in PostgreSQL (#73)
## 0.6.1

View file

@@ -1 +1 @@
VERSION = "0.6.1"
VERSION = "0.7.0"

View file

@@ -6,6 +6,7 @@ import asyncio
import json
import logging
import socket
from hashlib import md5
from time import sleep
from typing import List
@@ -57,7 +58,7 @@ class ArgosAgent: # pylint: disable-msg=too-many-instance-attributes
self._http_client = httpx.AsyncClient(headers=auth_header)
ua_header = {
"User-Agent": f"Prout Argos Panoptes {VERSION} "
"User-Agent": f"Argos Panoptes {VERSION} "
"(about: https://argos-monitoring.framasoft.org/)",
}
self._http_client_v4 = httpx.AsyncClient(
@@ -77,24 +78,59 @@ class ArgosAgent: # pylint: disable-msg=too-many-instance-attributes
await asyncio.sleep(self.wait_time)
async def _do_request(self, group: str, details: dict):
headers = {}
if details["request_data"] is not None:
request_data = json.loads(details["request_data"])
if request_data["headers"] is not None:
headers = request_data["headers"]
if details["ip_version"] == "4":
http_client = self._http_client_v4
else:
http_client = self._http_client_v6
try:
if details["ip_version"] == "4":
response = await self._http_client_v4.request( # type: ignore[union-attr]
method=details["method"], url=details["url"], timeout=60
if details["request_data"] is None or request_data["data"] is None:
response = await http_client.request( # type: ignore[union-attr]
method=details["method"],
url=details["url"],
headers=headers,
timeout=60,
)
elif request_data["json"]:
response = await http_client.request( # type: ignore[union-attr]
method=details["method"],
url=details["url"],
headers=headers,
json=request_data["data"],
timeout=60,
)
else:
response = await self._http_client_v6.request( # type: ignore[union-attr]
method=details["method"], url=details["url"], timeout=60
response = await http_client.request( # type: ignore[union-attr]
method=details["method"],
url=details["url"],
headers=headers,
data=request_data["data"],
timeout=60,
)
except httpx.ReadError:
sleep(1)
if details["ip_version"] == "4":
response = await self._http_client_v4.request( # type: ignore[union-attr]
if details["request_data"] is None or request_data["data"] is None:
response = await http_client.request( # type: ignore[union-attr]
method=details["method"], url=details["url"], timeout=60
)
elif request_data["json"]:
response = await http_client.request( # type: ignore[union-attr]
method=details["method"],
url=details["url"],
json=request_data["data"],
timeout=60,
)
else:
response = await self._http_client_v6.request( # type: ignore[union-attr]
method=details["method"], url=details["url"], timeout=60
response = await http_client.request( # type: ignore[union-attr]
method=details["method"],
url=details["url"],
data=request_data["data"],
timeout=60,
)
self._res_cache[group] = response
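Condensed, the dispatch above builds one httpx call whose body type depends on the task's serialized request_data. A minimal standalone sketch of that logic (hypothetical helper name, assuming the same JSON layout as the parser produces):

import json

import httpx

async def send_check(client: httpx.AsyncClient, details: dict) -> httpx.Response:
    # details["request_data"] is the JSON string produced at config-parsing time
    request_data = json.loads(details["request_data"]) if details["request_data"] else None
    headers = (request_data or {}).get("headers") or {}
    kwargs = dict(method=details["method"], url=details["url"], headers=headers, timeout=60)
    if request_data and request_data.get("data") is not None:
        # "json" is True when the configuration said is_json: true
        kwargs["json" if request_data.get("json") else "data"] = request_data["data"]
    return await client.request(**kwargs)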
@@ -128,6 +164,7 @@ class ArgosAgent: # pylint: disable-msg=too-many-instance-attributes
logger.info("Received %i tasks from the server", len(data))
req_groups = {}
_tasks = []
for _task in data:
task = Task(**_task)
@@ -135,15 +172,23 @@ class ArgosAgent: # pylint: disable-msg=too-many-instance-attributes
group = task.task_group
if task.check == "http-to-https":
data = task.request_data
if data is None:
data = ""
url = str(httpx.URL(task.url).copy_with(scheme="http"))
group = f"{task.method}-{task.ip_version}-{url}"
group = (
f"{task.method}-{task.ip_version}-{url}-"
f"{md5(data.encode()).hexdigest()}"
)
_task["task_group"] = group
req_groups[group] = {
"url": url,
"ip_version": task.ip_version,
"method": task.method,
"request_data": task.request_data,
}
_tasks.append(_task)
requests = []
for group, details in req_groups.items():
@@ -153,7 +198,7 @@ class ArgosAgent: # pylint: disable-msg=too-many-instance-attributes
await asyncio.gather(*requests)
tasks = []
for task in data:
for task in _tasks:
tasks.append(self._complete_task(task))
if tasks:
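The md5 suffix means tasks that share a method, IP version, URL and request body collapse into the same group, so the agent issues a single request per group (the pooled requests from the changelog). A toy illustration with hypothetical data:

from hashlib import md5

tasks = [
    {"method": "GET", "ip_version": "4", "url": "https://example.org/", "request_data": None},
    {"method": "GET", "ip_version": "4", "url": "https://example.org/", "request_data": None},
]
req_groups = {}
for task in tasks:
    body = task["request_data"] or ""
    group = f"{task['method']}-{task['ip_version']}-{task['url']}-{md5(body.encode()).hexdigest()}"
    req_groups[group] = task
assert len(req_groups) == 1  # two tasks, one shared request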

View file

@@ -190,6 +190,17 @@ websites:
- 302
- 307
- path: "/admin/"
method: "POST"
# Send form data in the request
request_data:
data:
login: "admin"
password: "my-password"
# To send data as JSON (optional, default is false):
is_json: true
# To send additional headers
headers:
Authorization: "Bearer foo-bar-baz"
checks:
# Check that the return HTTP status is one of those
# Similar to status-is; make sure you didn't mistype it!
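For reference, the /admin/ entry above translates into roughly this request; the domain is hypothetical, since the enclosing website entry is not shown:

import httpx

response = httpx.request(
    "POST",
    "https://example.org/admin/",  # hypothetical domain + the path above
    json={"login": "admin", "password": "my-password"},  # is_json: true -> JSON body
    headers={"Authorization": "Bearer foo-bar-baz"},
)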

View file

@@ -5,7 +5,7 @@ For database models, see argos.server.models.
import json
from typing import Dict, List, Literal, Tuple
from typing import Any, Dict, List, Literal, Tuple
from durations_nlp import Duration
from pydantic import (
@@ -18,7 +18,7 @@ from pydantic import (
PositiveInt,
field_validator,
)
from pydantic.functional_validators import BeforeValidator
from pydantic.functional_validators import AfterValidator, BeforeValidator
from pydantic.networks import UrlConstraints
from pydantic_core import Url
from typing_extensions import Annotated
@@ -104,9 +104,26 @@ def parse_checks(value):
return (name, expected)
def parse_request_data(value):
"""Turn form or JSON data into JSON string"""
return json.dumps(
{"data": value.data, "json": value.is_json, "headers": value.headers}
)
class RequestData(BaseModel):
data: Any = None
is_json: bool = False
headers: Dict[str, str] | None = None
class WebsitePath(BaseModel):
path: str
method: Method = "GET"
request_data: Annotated[
RequestData, AfterValidator(parse_request_data)
] | None = None
checks: List[
Annotated[
Tuple[str, str],
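A quick round-trip of the schema above, reusing RequestData and parse_request_data as defined here; under pydantic v2 the AfterValidator's return value (a JSON string) becomes the stored field value:

from pydantic import BaseModel
from pydantic.functional_validators import AfterValidator
from typing_extensions import Annotated

class Demo(BaseModel):  # hypothetical model, stand-in for WebsitePath
    request_data: Annotated[RequestData, AfterValidator(parse_request_data)] | None = None

demo = Demo(request_data={"data": {"login": "admin"}, "is_json": True})
print(demo.request_data)  # '{"data": {"login": "admin"}, "json": true, "headers": null}'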

View file

@@ -22,6 +22,7 @@ class Task(BaseModel):
ip_version: IPVersion
check: str
method: Method
request_data: str | None
expected: str
task_group: str
retry_before_notification: int

View file

@@ -0,0 +1,28 @@
"""Add request data to tasks
Revision ID: 31255a412d63
Revises: 80a29f64f91c
Create Date: 2024-12-09 16:40:20.926138
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = "31255a412d63"
down_revision: Union[str, None] = "80a29f64f91c"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
with op.batch_alter_table("tasks", schema=None) as batch_op:
batch_op.add_column(sa.Column("request_data", sa.String(), nullable=True))
def downgrade() -> None:
with op.batch_alter_table("tasks", schema=None) as batch_op:
batch_op.drop_column("request_data")
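batch_alter_table is used here so the migration also runs on SQLite, whose limited ALTER TABLE support makes Alembic recreate the table in batch mode; on backends like PostgreSQL it emits a plain ALTER.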

View file

@@ -8,6 +8,7 @@ Create Date: 2024-12-02 14:12:40.558033
from typing import Sequence, Union
from alembic import op
from sqlalchemy.dialects.postgresql import ENUM
import sqlalchemy as sa
@@ -19,14 +20,15 @@ depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
enum = ENUM("4", "6", name="ip_version_enum", create_type=False)
enum.create(op.get_bind(), checkfirst=False)
with op.batch_alter_table("tasks", schema=None) as batch_op:
batch_op.add_column(
sa.Column(
"ip_version", sa.Enum("4", "6"), server_default="4", nullable=False
)
sa.Column("ip_version", enum, server_default="4", nullable=False)
)
def downgrade() -> None:
with op.batch_alter_table("tasks", schema=None) as batch_op:
batch_op.drop_column("ip_version")
ENUM(name="ip_version_enum").drop(op.get_bind(), checkfirst=False)
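Naming the type (ip_version_enum) and passing create_type=False keeps type management explicit on PostgreSQL: enum.create() runs once before the column is added, and downgrade() can drop the type again by name.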

View file

@@ -0,0 +1,44 @@
"""Use bigint for results id field
Revision ID: bd4b4962696a
Revises: 31255a412d63
Create Date: 2025-01-06 11:44:37.552965
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = "bd4b4962696a"
down_revision: Union[str, None] = "31255a412d63"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
bind = op.get_bind()
if bind.engine.name != "sqlite":
with op.batch_alter_table("results", schema=None) as batch_op:
batch_op.alter_column(
"id",
existing_type=sa.INTEGER(),
type_=sa.BigInteger(),
existing_nullable=False,
autoincrement=True,
)
def downgrade() -> None:
bind = op.get_bind()
if bind.engine.name != "sqlite":
with op.batch_alter_table("results", schema=None) as batch_op:
batch_op.alter_column(
"id",
existing_type=sa.BigInteger(),
type_=sa.INTEGER(),
existing_nullable=False,
autoincrement=True,
)
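SQLite is skipped on purpose: its INTEGER PRIMARY KEY columns are rowid aliases and already 64-bit, so only backends such as PostgreSQL need the widening to BigInteger.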

View file

@@ -1,6 +1,7 @@
"""Database models"""
from datetime import datetime, timedelta
from hashlib import md5
from typing import List, Literal
from sqlalchemy import (
@@ -17,10 +18,14 @@ from argos.schemas.utils import IPVersion, Method
def compute_task_group(context) -> str:
data = context.current_parameters["request_data"]
if data is None:
data = ""
return (
f"{context.current_parameters['method']}-"
f"{context.current_parameters['ip_version']}-"
f"{context.current_parameters['url']}"
f"{context.current_parameters['url']}-"
f"{md5(data.encode()).hexdigest()}"
)
@@ -43,7 +48,7 @@ class Task(Base):
url: Mapped[str] = mapped_column()
domain: Mapped[str] = mapped_column()
ip_version: Mapped[IPVersion] = mapped_column(
Enum("4", "6"),
Enum("4", "6", name="ip_version_enum"),
)
check: Mapped[str] = mapped_column()
expected: Mapped[str] = mapped_column()
@@ -67,6 +72,7 @@ class Task(Base):
),
insert_default="GET",
)
request_data: Mapped[str] = mapped_column(nullable=True)
# Orchestration-related
selected_by: Mapped[str] = mapped_column(nullable=True)
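Giving the column's Enum the explicit name ip_version_enum matches the type created by the migration above, so SQLAlchemy and Alembic agree on the PostgreSQL type instead of generating an anonymous one.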

View file

@@ -83,13 +83,22 @@ async def count_results(db: Session):
return db.query(Result).count()
async def has_config_changed(db: Session, config: schemas.Config) -> bool:
async def has_config_changed(db: Session, config: schemas.Config) -> bool: # pylint: disable-msg=too-many-statements
"""Check if websites config has changed by using a hashsum and a config cache"""
websites_hash = sha256(str(config.websites).encode()).hexdigest()
conf_caches = db.query(ConfigCache).all()
same_config = True
keys = [
"websites_hash",
"general_frequency",
"general_recheck_delay",
"general_retry_before_notification",
"general_ipv4",
"general_ipv6",
]
if conf_caches:
for conf in conf_caches:
keys.remove(conf.name)
match conf.name:
case "websites_hash":
if conf.val != websites_hash:
@@ -111,9 +120,62 @@ async def has_config_changed(db: Session, config: schemas.Config) -> bool:
same_config = False
conf.val = str(config.general.retry_before_notification)
conf.updated_at = datetime.now()
case "general_ipv4":
if conf.val != str(config.general.ipv4):
same_config = False
conf.val = str(config.general.ipv4)
conf.updated_at = datetime.now()
case "general_ipv6":
if conf.val != str(config.general.ipv6):
same_config = False
conf.val = str(config.general.ipv6)
conf.updated_at = datetime.now()
for i in keys:
match i:
case "websites_hash":
c = ConfigCache(
name="websites_hash",
val=websites_hash,
updated_at=datetime.now(),
)
case "general_frequency":
c = ConfigCache(
name="general_frequency",
val=str(config.general.frequency),
updated_at=datetime.now(),
)
case "general_recheck_delay":
c = ConfigCache(
name="general_recheck_delay",
val=str(config.general.recheck_delay),
updated_at=datetime.now(),
)
case "general_retry_before_notification":
c = ConfigCache(
name="general_retry_before_notification",
val=str(config.general.retry_before_notification),
updated_at=datetime.now(),
)
case "general_ipv4":
c = ConfigCache(
name="general_ipv4",
val=str(config.general.ipv4),
updated_at=datetime.now(),
)
case "general_ipv6":
c = ConfigCache(
name="general_ipv6",
val=str(config.general.ipv6),
updated_at=datetime.now(),
)
db.add(c)
db.commit()
if keys:
return True
if same_config:
return False
@@ -136,10 +198,22 @@ async def has_config_changed(db: Session, config: schemas.Config) -> bool:
val=str(config.general.retry_before_notification),
updated_at=datetime.now(),
)
gen_ipv4 = ConfigCache(
name="general_ipv4",
val=str(config.general.ipv4),
updated_at=datetime.now(),
)
gen_ipv6 = ConfigCache(
name="general_ipv6",
val=str(config.general.ipv6),
updated_at=datetime.now(),
)
db.add(web_hash)
db.add(gen_freq)
db.add(gen_recheck)
db.add(gen_retry_before_notif)
db.add(gen_ipv4)
db.add(gen_ipv6)
db.commit()
return True
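Behaviorally, with hypothetical db and config objects: the first call seeds the cache and reports a change, and an identical second call reports none:

changed = await has_config_changed(db, config)  # True: cache entries just created
changed = await has_config_changed(db, config)  # False: hash and cached values match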
@@ -173,12 +247,6 @@ async def update_from_config(db: Session, config: schemas.Config): # pylint: di
continue
for ip_version in ["4", "6"]:
if ip_version == "4" and ipv4 is False:
continue
if ip_version == "6" and ipv6 is False:
continue
for p in website.paths:
url = urljoin(domain, str(p.path))
for check_key, expected in p.checks:
@@ -188,14 +256,22 @@ async def update_from_config(db: Session, config: schemas.Config): # pylint: di
.filter(
Task.url == url,
Task.method == p.method,
Task.request_data == p.request_data,
Task.check == check_key,
Task.expected == expected,
Task.ip_version == ip_version,
)
.all()
)
if (ip_version == "4" and ipv4 is False) or (
ip_version == "6" and ipv6 is False
):
continue
if existing_tasks:
existing_task = existing_tasks[0]
seen_tasks.append(existing_task.id)
if frequency != existing_task.frequency:
@@ -225,7 +301,14 @@ async def update_from_config(db: Session, config: schemas.Config): # pylint: di
)
else:
properties = (url, p.method, check_key, expected, ip_version)
properties = (
url,
p.method,
check_key,
expected,
ip_version,
p.request_data,
)
if properties not in unique_properties:
unique_properties.append(properties)
task = Task(
@@ -233,6 +316,7 @@ async def update_from_config(db: Session, config: schemas.Config): # pylint: di
url=url,
ip_version=ip_version,
method=p.method,
request_data=p.request_data,
check=check_key,
expected=expected,
frequency=frequency,
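Since request_data is now part of the uniqueness tuple, two otherwise identical paths that differ only in their request body yield distinct tasks instead of being deduplicated.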

View file

@@ -357,8 +357,21 @@ async def set_refresh_cookies_view(
request.url_for("get_severity_counts_view"),
status_code=status.HTTP_303_SEE_OTHER,
)
response.set_cookie(key="auto_refresh_enabled", value=str(auto_refresh_enabled))
# Cookie age in Chrome can't exceed 400 days
# https://developer.chrome.com/blog/cookie-max-age-expires
delta = int(timedelta(days=400).total_seconds())
response.set_cookie(
key="auto_refresh_seconds", value=str(max(5, int(auto_refresh_seconds)))
key="auto_refresh_enabled",
value=str(auto_refresh_enabled),
httponly=True,
samesite="strict",
expires=delta,
)
response.set_cookie(
key="auto_refresh_seconds",
value=str(max(5, int(auto_refresh_seconds))),
httponly=True,
samesite="strict",
expires=delta,
)
return response
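For reference, the cap works out as follows; a quick sanity check:

from datetime import timedelta

assert int(timedelta(days=400).total_seconds()) == 34_560_000  # 400 days, in seconds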

View file

@@ -1,6 +1,8 @@
---
general:
db:
# The database URL, as defined in SQLAlchemy docs : https://docs.sqlalchemy.org/en/20/core/engines.html#database-urls
# The database URL, as defined in SQLAlchemy docs:
# https://docs.sqlalchemy.org/en/20/core/engines.html#database-urls
url: "sqlite:////tmp/test-argos.db"
env: test
cookie_secret: "foo-bar-baz"

View file

@@ -37,6 +37,7 @@ def ssl_task(now):
domain="https://example.org",
ip_version="6",
method="GET",
request_data=None,
task_group="GET-6-https://example.org",
check="ssl-certificate-expiration",
retry_before_notification=0,

View file

@@ -1,3 +1,4 @@
---
- domain: "https://mypads.framapad.org"
paths:
- path: "/mypads/"