mirror of https://framagit.org/framasoft/framaspace/argos.git
synced 2025-05-17 10:41:50 +02:00

Compare commits: 0204a01e66 ... 3d209fed22 (8 commits)

- 3d209fed22
- acd90133bd
- be90aa095a
- 06f8310505
- fe89d62e88
- 1e7672abca
- 2ef999fa63
- 9c8be94c20

15 changed files with 295 additions and 32 deletions
@@ -2,9 +2,17 @@
 ## [Unreleased]
 
+## 0.7.0
+
+Date: 2025-01-14
+
 - ✨ — IPv4/IPv6 choice for checks, and choice for a dual-stack check (#69)
 - ⚡ — Mutualize check requests (#68)
 - ✨ — Ability to delay notification after X failures (#71)
+- 🐛 — Fix bug when changing IP version not removing tasks (#72)
+- ✨ — Allow to specify form data and headers for checks (#70)
+- 🚸 — Add a long expiration date on auto-refresh cookies
+- 🗃️ — Use bigint type for results id column in PostgreSQL (#73)
 
 ## 0.6.1
@@ -1 +1 @@
-VERSION = "0.6.1"
+VERSION = "0.7.0"
@@ -6,6 +6,7 @@ import asyncio
 import json
 import logging
 import socket
+from hashlib import md5
 from time import sleep
 from typing import List
@@ -57,7 +58,7 @@ class ArgosAgent:  # pylint: disable-msg=too-many-instance-attributes
         self._http_client = httpx.AsyncClient(headers=auth_header)
 
         ua_header = {
-            "User-Agent": f"Prout Argos Panoptes {VERSION} "
+            "User-Agent": f"Argos Panoptes {VERSION} "
             "(about: https://argos-monitoring.framasoft.org/)",
         }
         self._http_client_v4 = httpx.AsyncClient(
@@ -77,24 +78,59 @@ class ArgosAgent:  # pylint: disable-msg=too-many-instance-attributes
         await asyncio.sleep(self.wait_time)
 
     async def _do_request(self, group: str, details: dict):
+        headers = {}
+        if details["request_data"] is not None:
+            request_data = json.loads(details["request_data"])
+            if request_data["headers"] is not None:
+                headers = request_data["headers"]
+
+        if details["ip_version"] == "4":
+            http_client = self._http_client_v4
+        else:
+            http_client = self._http_client_v6
         try:
-            if details["ip_version"] == "4":
-                response = await self._http_client_v4.request(  # type: ignore[union-attr]
-                    method=details["method"], url=details["url"], timeout=60
+            if details["request_data"] is None or request_data["data"] is None:
+                response = await http_client.request(  # type: ignore[union-attr]
+                    method=details["method"],
+                    url=details["url"],
+                    headers=headers,
+                    timeout=60,
+                )
+            elif request_data["json"]:
+                response = await http_client.request(  # type: ignore[union-attr]
+                    method=details["method"],
+                    url=details["url"],
+                    headers=headers,
+                    json=request_data["data"],
+                    timeout=60,
                 )
             else:
-                response = await self._http_client_v6.request(  # type: ignore[union-attr]
-                    method=details["method"], url=details["url"], timeout=60
+                response = await http_client.request(  # type: ignore[union-attr]
+                    method=details["method"],
+                    url=details["url"],
+                    headers=headers,
+                    data=request_data["data"],
+                    timeout=60,
                 )
         except httpx.ReadError:
             sleep(1)
-            if details["ip_version"] == "4":
-                response = await self._http_client_v4.request(  # type: ignore[union-attr]
-                    method=details["method"], url=details["url"], timeout=60
-                )
+            if details["request_data"] is None or request_data["data"] is None:
+                response = await http_client.request(  # type: ignore[union-attr]
+                    method=details["method"], url=details["url"], timeout=60
+                )
+            elif request_data["json"]:
+                response = await http_client.request(  # type: ignore[union-attr]
+                    method=details["method"],
+                    url=details["url"],
+                    json=request_data["data"],
+                    timeout=60,
+                )
             else:
-                response = await self._http_client_v6.request(  # type: ignore[union-attr]
-                    method=details["method"], url=details["url"], timeout=60
+                response = await http_client.request(  # type: ignore[union-attr]
+                    method=details["method"],
+                    url=details["url"],
+                    data=request_data["data"],
+                    timeout=60,
                 )
 
         self._res_cache[group] = response
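For orientation, the `details` dict handed to `_do_request` now carries a `request_data` JSON string in the `{"data": …, "json": …, "headers": …}` shape produced by `parse_request_data` (see the schemas hunk further down). A minimal sketch, with a made-up URL and payload:

```python
import json

# Hypothetical task-group details, shaped like the req_groups entries built
# in the hunks that follow (the URL and credentials here are invented).
details = {
    "method": "POST",
    "url": "https://example.org/admin/",
    "ip_version": "4",
    "request_data": json.dumps(
        {
            "data": {"login": "admin", "password": "my-password"},
            "json": False,  # True would route through httpx's json= kwarg
            "headers": {"Authorization": "Bearer foo-bar-baz"},
        }
    ),
}

request_data = json.loads(details["request_data"])
assert request_data["headers"]["Authorization"] == "Bearer foo-bar-baz"
assert not request_data["json"]  # so the form-encoded branch (data=) is taken
```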
@@ -128,6 +164,7 @@ class ArgosAgent:  # pylint: disable-msg=too-many-instance-attributes
         logger.info("Received %i tasks from the server", len(data))
 
         req_groups = {}
+        _tasks = []
         for _task in data:
             task = Task(**_task)
@@ -135,15 +172,23 @@ class ArgosAgent:  # pylint: disable-msg=too-many-instance-attributes
             group = task.task_group
 
             if task.check == "http-to-https":
+                data = task.request_data
+                if data is None:
+                    data = ""
                 url = str(httpx.URL(task.url).copy_with(scheme="http"))
-                group = f"{task.method}-{task.ip_version}-{url}"
+                group = (
+                    f"{task.method}-{task.ip_version}-{url}-"
+                    f"{md5(data.encode()).hexdigest()}"
+                )
                 _task["task_group"] = group
 
             req_groups[group] = {
                 "url": url,
                 "ip_version": task.ip_version,
                 "method": task.method,
+                "request_data": task.request_data,
             }
+            _tasks.append(_task)
 
         requests = []
         for group, details in req_groups.items():
@@ -153,7 +198,7 @@ class ArgosAgent:  # pylint: disable-msg=too-many-instance-attributes
         await asyncio.gather(*requests)
 
         tasks = []
-        for task in data:
+        for task in _tasks:
             tasks.append(self._complete_task(task))
 
         if tasks:
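The switch from `data` to `_tasks` in the completion loop matters: `http-to-https` tasks had their `task_group` rewritten above, and only `_tasks` carries those rewritten dicts, so results are attached to the md5-suffixed group instead of the group name the server originally sent.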
@@ -190,6 +190,17 @@ websites:
         - 302
         - 307
     - path: "/admin/"
+      method: "POST"
+      # Send form data in the request
+      request_data:
+        data:
+          login: "admin"
+          password: "my-password"
+        # To send data as JSON (optional, default is false):
+        is_json: true
+        # To send additional headers
+        headers:
+          Authorization: "Bearer foo-bar-baz"
       checks:
         # Check that the return HTTP status is one of those
         # Similar to status-is, verify that you didn't mistype it!
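Read together with the agent changes above, a `request_data` block like this one amounts, roughly, to the following httpx call. This is a sketch with the example.org URL assumed; the real request goes through `ArgosAgent._do_request` with its shared clients and retry handling:

```python
import httpx

# Approximation of the check the config excerpt would produce. With
# is_json: true the payload is sent as a JSON body (json=); without it,
# it travels as form data (data=).
response = httpx.request(
    method="POST",
    url="https://example.org/admin/",
    headers={"Authorization": "Bearer foo-bar-baz"},
    json={"login": "admin", "password": "my-password"},
    timeout=60,
)
print(response.status_code)
```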
@@ -5,7 +5,7 @@ For database models, see argos.server.models.
 
 import json
 
-from typing import Dict, List, Literal, Tuple
+from typing import Any, Dict, List, Literal, Tuple
 
 from durations_nlp import Duration
 from pydantic import (
@@ -18,7 +18,7 @@ from pydantic import (
     PositiveInt,
     field_validator,
 )
-from pydantic.functional_validators import BeforeValidator
+from pydantic.functional_validators import AfterValidator, BeforeValidator
 from pydantic.networks import UrlConstraints
 from pydantic_core import Url
 from typing_extensions import Annotated
@@ -104,9 +104,26 @@ def parse_checks(value):
         return (name, expected)
 
 
+def parse_request_data(value):
+    """Turn form or JSON data into JSON string"""
+
+    return json.dumps(
+        {"data": value.data, "json": value.is_json, "headers": value.headers}
+    )
+
+
+class RequestData(BaseModel):
+    data: Any = None
+    is_json: bool = False
+    headers: Dict[str, str] | None = None
+
+
 class WebsitePath(BaseModel):
     path: str
     method: Method = "GET"
+    request_data: Annotated[
+        RequestData, AfterValidator(parse_request_data)
+    ] | None = None
     checks: List[
         Annotated[
             Tuple[str, str],
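To make the validator's effect concrete, here is a small round trip (a sketch, not from the diff): pydantic first builds the `RequestData` instance, then `AfterValidator(parse_request_data)` collapses it into the JSON string that is stored on the task and later parsed by the agent:

```python
# Hypothetical use of the new schema pieces defined above.
rd = RequestData(
    data={"login": "admin", "password": "my-password"},
    is_json=True,
    headers={"Authorization": "Bearer foo-bar-baz"},
)
print(parse_request_data(rd))
# {"data": {"login": "admin", "password": "my-password"}, "json": true,
#  "headers": {"Authorization": "Bearer foo-bar-baz"}}
```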
@@ -22,6 +22,7 @@ class Task(BaseModel):
     ip_version: IPVersion
     check: str
     method: Method
+    request_data: str | None
     expected: str
     task_group: str
     retry_before_notification: int
@@ -0,0 +1,28 @@
+"""Add request data to tasks
+
+Revision ID: 31255a412d63
+Revises: 80a29f64f91c
+Create Date: 2024-12-09 16:40:20.926138
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision: str = "31255a412d63"
+down_revision: Union[str, None] = "80a29f64f91c"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    with op.batch_alter_table("tasks", schema=None) as batch_op:
+        batch_op.add_column(sa.Column("request_data", sa.String(), nullable=True))
+
+
+def downgrade() -> None:
+    with op.batch_alter_table("tasks", schema=None) as batch_op:
+        batch_op.drop_column("request_data")
@@ -8,6 +8,7 @@ Create Date: 2024-12-02 14:12:40.558033
 from typing import Sequence, Union
 
 from alembic import op
+from sqlalchemy.dialects.postgresql import ENUM
 import sqlalchemy as sa
 
 
@@ -19,14 +20,15 @@ depends_on: Union[str, Sequence[str], None] = None
 
 
 def upgrade() -> None:
+    enum = ENUM("4", "6", name="ip_version_enum", create_type=False)
+    enum.create(op.get_bind(), checkfirst=False)
     with op.batch_alter_table("tasks", schema=None) as batch_op:
         batch_op.add_column(
-            sa.Column(
-                "ip_version", sa.Enum("4", "6"), server_default="4", nullable=False
-            )
+            sa.Column("ip_version", enum, server_default="4", nullable=False)
         )
 
 
 def downgrade() -> None:
     with op.batch_alter_table("tasks", schema=None) as batch_op:
         batch_op.drop_column("ip_version")
+    ENUM(name="ip_version_enum").drop(op.get_bind(), checkfirst=False)
@@ -0,0 +1,44 @@
+"""Use bigint for results id field
+
+Revision ID: bd4b4962696a
+Revises: 31255a412d63
+Create Date: 2025-01-06 11:44:37.552965
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision: str = "bd4b4962696a"
+down_revision: Union[str, None] = "31255a412d63"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    bind = op.get_bind()
+    if bind.engine.name != "sqlite":
+        with op.batch_alter_table("results", schema=None) as batch_op:
+            batch_op.alter_column(
+                "id",
+                existing_type=sa.INTEGER(),
+                type_=sa.BigInteger(),
+                existing_nullable=False,
+                autoincrement=True,
+            )
+
+
+def downgrade() -> None:
+    bind = op.get_bind()
+    if bind.engine.name != "sqlite":
+        with op.batch_alter_table("results", schema=None) as batch_op:
+            batch_op.alter_column(
+                "id",
+                existing_type=sa.BigInteger(),
+                type_=sa.INTEGER(),
+                existing_nullable=False,
+                autoincrement=True,
+            )
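The `!= "sqlite"` guard makes this migration a deliberate no-op on SQLite: there, an INTEGER PRIMARY KEY is already an alias for the 64-bit rowid, so only PostgreSQL (and other backends) need the column widened, which is what the changelog's bigint entry (#73) refers to.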
@@ -1,6 +1,7 @@
 """Database models"""
 
 from datetime import datetime, timedelta
+from hashlib import md5
 from typing import List, Literal
 
 from sqlalchemy import (
@@ -17,10 +18,14 @@ from argos.schemas.utils import IPVersion, Method
 
 
 def compute_task_group(context) -> str:
+    data = context.current_parameters["request_data"]
+    if data is None:
+        data = ""
     return (
         f"{context.current_parameters['method']}-"
         f"{context.current_parameters['ip_version']}-"
-        f"{context.current_parameters['url']}"
+        f"{context.current_parameters['url']}-"
+        f"{md5(data.encode()).hexdigest()}"
     )
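As a quick sanity check on the new naming scheme: when `request_data` is `None` the empty string is hashed, so every existing group name simply gains the well-known empty-md5 suffix. A sketch:

```python
from hashlib import md5

data = ""  # request_data is None, so the empty string is hashed
group = f"GET-6-https://example.org-{md5(data.encode()).hexdigest()}"
print(group)
# GET-6-https://example.org-d41d8cd98f00b204e9800998ecf8427e
```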
@@ -43,7 +48,7 @@ class Task(Base):
     url: Mapped[str] = mapped_column()
     domain: Mapped[str] = mapped_column()
     ip_version: Mapped[IPVersion] = mapped_column(
-        Enum("4", "6"),
+        Enum("4", "6", name="ip_version_enum"),
     )
     check: Mapped[str] = mapped_column()
     expected: Mapped[str] = mapped_column()
@@ -67,6 +72,7 @@ class Task(Base):
         ),
         insert_default="GET",
     )
+    request_data: Mapped[str] = mapped_column(nullable=True)
 
     # Orchestration-related
     selected_by: Mapped[str] = mapped_column(nullable=True)
@@ -83,13 +83,22 @@ async def count_results(db: Session):
     return db.query(Result).count()
 
 
-async def has_config_changed(db: Session, config: schemas.Config) -> bool:
+async def has_config_changed(db: Session, config: schemas.Config) -> bool:  # pylint: disable-msg=too-many-statements
     """Check if websites config has changed by using a hashsum and a config cache"""
     websites_hash = sha256(str(config.websites).encode()).hexdigest()
     conf_caches = db.query(ConfigCache).all()
     same_config = True
+    keys = [
+        "websites_hash",
+        "general_frequency",
+        "general_recheck_delay",
+        "general_retry_before_notification",
+        "general_ipv4",
+        "general_ipv6",
+    ]
     if conf_caches:
         for conf in conf_caches:
+            keys.remove(conf.name)
             match conf.name:
                 case "websites_hash":
                     if conf.val != websites_hash:
@@ -111,9 +120,62 @@ async def has_config_changed(db: Session, config: schemas.Config) -> bool:
                         same_config = False
                         conf.val = str(config.general.retry_before_notification)
                         conf.updated_at = datetime.now()
+                case "general_ipv4":
+                    if conf.val != str(config.general.ipv4):
+                        same_config = False
+                        conf.val = str(config.general.ipv4)
+                        conf.updated_at = datetime.now()
+                case "general_ipv6":
+                    if conf.val != str(config.general.ipv6):
+                        same_config = False
+                        conf.val = str(config.general.ipv6)
+                        conf.updated_at = datetime.now()
+
+        for i in keys:
+            match i:
+                case "websites_hash":
+                    c = ConfigCache(
+                        name="websites_hash",
+                        val=websites_hash,
+                        updated_at=datetime.now(),
+                    )
+                case "general_frequency":
+                    c = ConfigCache(
+                        name="general_frequency",
+                        val=str(config.general.frequency),
+                        updated_at=datetime.now(),
+                    )
+                case "general_recheck_delay":
+                    c = ConfigCache(
+                        name="general_recheck_delay",
+                        val=str(config.general.recheck_delay),
+                        updated_at=datetime.now(),
+                    )
+                case "general_retry_before_notification":
+                    c = ConfigCache(
+                        name="general_retry_before_notification",
+                        val=str(config.general.retry_before_notification),
+                        updated_at=datetime.now(),
+                    )
+                case "general_ipv4":
+                    c = ConfigCache(
+                        name="general_ipv4",
+                        val=str(config.general.ipv4),
+                        updated_at=datetime.now(),
+                    )
+                case "general_ipv6":
+                    c = ConfigCache(
+                        name="general_ipv6",
+                        val=str(config.general.ipv6),
+                        updated_at=datetime.now(),
+                    )
+            db.add(c)
+
         db.commit()
 
+        if keys:
+            return True
+
         if same_config:
             return False
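The `keys` list doubles as bookkeeping for missing cache rows: every name found in the cache is removed from it, so whatever survives the loop is a setting absent from the cache (for instance `general_ipv4`/`general_ipv6` when upgrading from a release that predates them). Those rows are created, and `if keys: return True` then forces a configuration reload.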
@@ -136,10 +198,22 @@ async def has_config_changed(db: Session, config: schemas.Config) -> bool:
         val=str(config.general.retry_before_notification),
         updated_at=datetime.now(),
     )
+    gen_ipv4 = ConfigCache(
+        name="general_ipv4",
+        val=str(config.general.ipv4),
+        updated_at=datetime.now(),
+    )
+    gen_ipv6 = ConfigCache(
+        name="general_ipv6",
+        val=str(config.general.ipv6),
+        updated_at=datetime.now(),
+    )
     db.add(web_hash)
     db.add(gen_freq)
     db.add(gen_recheck)
     db.add(gen_retry_before_notif)
+    db.add(gen_ipv4)
+    db.add(gen_ipv6)
     db.commit()
 
     return True
@@ -173,12 +247,6 @@ async def update_from_config(db: Session, config: schemas.Config):  # pylint: di
             continue
 
         for ip_version in ["4", "6"]:
-            if ip_version == "4" and ipv4 is False:
-                continue
-
-            if ip_version == "6" and ipv6 is False:
-                continue
-
             for p in website.paths:
                 url = urljoin(domain, str(p.path))
                 for check_key, expected in p.checks:
@@ -188,14 +256,22 @@ async def update_from_config(db: Session, config: schemas.Config):  # pylint: di
                     .filter(
                         Task.url == url,
                         Task.method == p.method,
+                        Task.request_data == p.request_data,
                         Task.check == check_key,
                         Task.expected == expected,
                         Task.ip_version == ip_version,
                     )
                     .all()
                 )
 
+                if (ip_version == "4" and ipv4 is False) or (
+                    ip_version == "6" and ipv6 is False
+                ):
+                    continue
+
                 if existing_tasks:
                     existing_task = existing_tasks[0]
 
                     seen_tasks.append(existing_task.id)
 
                     if frequency != existing_task.frequency:
@@ -225,7 +301,14 @@ async def update_from_config(db: Session, config: schemas.Config):  # pylint: di
                     )
 
                 else:
-                    properties = (url, p.method, check_key, expected, ip_version)
+                    properties = (
+                        url,
+                        p.method,
+                        check_key,
+                        expected,
+                        ip_version,
+                        p.request_data,
+                    )
                     if properties not in unique_properties:
                         unique_properties.append(properties)
                         task = Task(
@@ -233,6 +316,7 @@ async def update_from_config(db: Session, config: schemas.Config):  # pylint: di
                             url=url,
                             ip_version=ip_version,
                             method=p.method,
+                            request_data=p.request_data,
                             check=check_key,
                             expected=expected,
                             frequency=frequency,
@@ -357,8 +357,21 @@ async def set_refresh_cookies_view(
         request.url_for("get_severity_counts_view"),
         status_code=status.HTTP_303_SEE_OTHER,
     )
-    response.set_cookie(key="auto_refresh_enabled", value=str(auto_refresh_enabled))
+    # Cookies’ age in Chrome can’t be more than 400 days
+    # https://developer.chrome.com/blog/cookie-max-age-expires
+    delta = int(timedelta(days=400).total_seconds())
     response.set_cookie(
-        key="auto_refresh_seconds", value=str(max(5, int(auto_refresh_seconds)))
+        key="auto_refresh_enabled",
+        value=str(auto_refresh_enabled),
+        httponly=True,
+        samesite="strict",
+        expires=delta,
+    )
+    response.set_cookie(
+        key="auto_refresh_seconds",
+        value=str(max(5, int(auto_refresh_seconds))),
+        httponly=True,
+        samesite="strict",
+        expires=delta,
+    )
     return response
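For context: Argos's dashboard is built on FastAPI/Starlette, where an integer `expires` passed to `set_cookie` is a lifetime in seconds, so `delta` works out to 400 days as 34 560 000 seconds, the maximum Chrome will honour per the linked blog post.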
@@ -1,6 +1,8 @@
+---
 general:
   db:
-    # The database URL, as defined in SQLAlchemy docs : https://docs.sqlalchemy.org/en/20/core/engines.html#database-urls
+    # The database URL, as defined in SQLAlchemy docs:
+    # https://docs.sqlalchemy.org/en/20/core/engines.html#database-urls
     url: "sqlite:////tmp/test-argos.db"
   env: test
 cookie_secret: "foo-bar-baz"
@@ -37,6 +37,7 @@ def ssl_task(now):
         domain="https://example.org",
         ip_version="6",
         method="GET",
+        request_data=None,
         task_group="GET-6-https://example.org",
         check="ssl-certificate-expiration",
         retry_before_notification=0,
@@ -1,6 +1,7 @@
+---
 - domain: "https://mypads.framapad.org"
   paths:
     - path: "/mypads/"
       checks:
         - status-is: 200
         - body-contains: '<div id= "mypads"></div>'