Mirror of https://framagit.org/framasoft/framaspace/argos.git (synced 2025-04-28 09:52:38 +02:00)
First iterations of the client / server.
- Added new libraries to Pipfile: httpx and click
- Refactored the file structure
- Added new functionality in logging.py to set log level
- README.md now includes information about running the server, running the client, and a sample configuration file
- Started working on checks logic
parent 8ac5cdb529
commit 0a4850c1ed
14 changed files with 287 additions and 43 deletions
Pipfile (2 changes)

@@ -9,6 +9,8 @@ sqlalchemy = "*"
 sqlalchemy-utils = "*"
 uvicorn = "*"
 pyyaml = "*"
+httpx = "*"
+click = "*"
 
 [dev-packages]
 
Pipfile.lock (generated, 40 changes)

@@ -1,7 +1,7 @@
 {
     "_meta": {
         "hash": {
-            "sha256": "b95f0093b5a31fb370823a1219b4cde22ee7b0e07d9c7bc331aad379f05ee7da"
+            "sha256": "65abfc821a32d62f8da703a8df902b964e88c2acf91eaeb8b7cf9c2dd2e6b4dd"
         },
         "pipfile-spec": 6,
         "requires": {
@@ -32,22 +32,31 @@
             "markers": "python_version >= '3.7'",
             "version": "==3.7.1"
         },
         "certifi": {
             "hashes": [
                 "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082",
                 "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"
             ],
             "markers": "python_version >= '3.6'",
             "version": "==2023.7.22"
         },
+        "click": {
+            "hashes": [
+                "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28",
+                "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"
+            ],
+            "index": "pypi",
+            "markers": "python_version >= '3.7'",
+            "version": "==8.1.7"
+        },
         "fastapi": {
             "hashes": [
-                "sha256:345844e6a82062f06a096684196aaf96c1198b25c06b72c1311b882aa2d8a35d",
-                "sha256:5e5f17e826dbd9e9b5a5145976c5cd90bcaa61f2bf9a69aca423f2bcebe44d83"
+                "sha256:3270de872f0fe9ec809d4bd3d4d890c6d5cc7b9611d721d6438f9dacc8c4ef2e",
+                "sha256:75a11f6bfb8fc4d2bec0bd710c2d5f2829659c0e8c0afd5560fdda6ce25ec653"
             ],
             "index": "pypi",
             "markers": "python_version >= '3.7'",
-            "version": "==0.103.1"
+            "version": "==0.103.2"
         },
         "h11": {
             "hashes": [
@@ -57,6 +66,23 @@
             "markers": "python_version >= '3.7'",
             "version": "==0.14.0"
         },
+        "httpcore": {
+            "hashes": [
+                "sha256:13b5e5cd1dca1a6636a6aaea212b19f4f85cd88c366a2b82304181b769aab3c9",
+                "sha256:adc5398ee0a476567bf87467063ee63584a8bce86078bf748e48754f60202ced"
+            ],
+            "markers": "python_version >= '3.8'",
+            "version": "==0.18.0"
+        },
+        "httpx": {
+            "hashes": [
+                "sha256:181ea7f8ba3a82578be86ef4171554dd45fec26a02556a744db029a0a27b7100",
+                "sha256:47ecda285389cb32bb2691cc6e069e3ab0205956f681c5b2ad2325719751d875"
+            ],
+            "index": "pypi",
+            "markers": "python_version >= '3.8'",
+            "version": "==0.25.0"
+        },
         "idna": {
             "hashes": [
                 "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4",
@@ -67,11 +93,11 @@
         },
         "pydantic": {
             "hashes": [
-                "sha256:2b2240c8d54bb8f84b88e061fac1bdfa1761c2859c367f9d3afe0ec2966deddc",
-                "sha256:b172505886028e4356868d617d2d1a776d7af1625d1313450fd51bdd19d9d61f"
+                "sha256:94f336138093a5d7f426aac732dcfe7ab4eb4da243c88f891d65deb4a2556ee7",
+                "sha256:bc3ddf669d234f4220e6e1c4d96b061abe0998185a8d7855c0126782b7abc8c1"
             ],
             "markers": "python_version >= '3.7'",
-            "version": "==2.4.1"
+            "version": "==2.4.2"
         },
         "pydantic-core": {
             "hashes": [
README.md (73 changes)

@@ -12,8 +12,9 @@ Features :
 - [ ] Checks can be distributed on the network thanks to a job queue ;
 - [x] Multiple paths per websites can be tested ;
 - [ ] Handles multiple alerting backends (email, sms, gotify) ;
-- [ ] Exposes an HTTP API that can be consumed by other systems ;
+- [x] Exposes an HTTP API that can be consumed by other systems ;
 - [ ] Exposes a simple read-only website.
+- [ ] Packaging (and argos-client / argos-server commands)
 
 Implemented checks :
 
@@ -21,6 +22,76 @@ Implemented checks :
 - [ ] Returned body matches what you expect ;
 - [ ] SSL certificate expires in more than X days ;
 
+## How to run ?
+
+We're using [pipenv](https://pipenv.pypa.io/) to manage the virtual environment and the dependencies.
+You can install it with [pipx](https://pypa.github.io/pipx/):
+
+```bash
+pipx install pipenv
+```
+
+And then, checkout this repository and sync its pipenv
+
+```bash
+pipenv sync
+```
+
+Once all the dependencies are in place, here is how to run the server:
+
+```bash
+pipenv run uvicorn argos.server:app --reload
+```
+
+The server will read a `config.yaml` file at startup, and will populate the tasks specified in it. See the configuration section below for more information on how to configure the checks you want to run.
+
+And here is how to run the client:
+
+```bash
+pipenv run python -m argos.client.cli --server http://localhost:8000
+```
+
+NB: `argos-server` and `argos-client` commands will be provided in the future.
+
+## Configuration
+
+Here is a simple configuration file:
+
+```yaml
+general:
+  frequency: 4h # Run checks every 4 hours.
+  alerts:
+    error:
+      - local
+    warning:
+      - local
+    alert:
+      - local
+service:
+  port: 8888
+  # Can be generated using `openssl rand -base64 32`.
+  secrets:
+    - "O4kt8Max9/k0EmHaEJ0CGGYbBNFmK8kOZNIoUk3Kjwc"
+    - "x1T1VZR51pxrv5pQUyzooMG4pMUvHNMhA5y/3cUsYVs="
+
+ssl:
+  thresholds:
+    critical: "1d"
+    warning: "10d"
+
+websites:
+  - domain: "https://blog.notmyidea.org"
+    paths:
+      - path: "/"
+        checks:
+          - status-is: 200
+          - body-contains: "Alexis"
+          - ssl-certificate-expiration: "on-check"
+      - path: "/foo"
+        checks:
+          - status-is: 400
+```
+
 ## Development notes
 
 ### On service start.
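To make the configuration section above concrete, here is a rough sketch of what "reads a `config.yaml` file at startup" amounts to, based on the schemas and queries added in this commit; the exact loading code in the server may differ, and the `Config(**data)` call is an assumption about the schema's shape.

```python
# Sketch only: load config.yaml into the pydantic Config schema and let
# update_from_config() create one task per (url, check, expected) triple.
import yaml

from argos import schemas
from argos.server import queries
from argos.server.database import SessionLocal

with open("config.yaml") as f:
    config = schemas.Config(**yaml.safe_load(f))  # assumed constructor shape

db = SessionLocal()
queries.update_from_config(db, config)
db.close()
```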
@@ -1,26 +1,82 @@
-class HTTPStatusCheck:
+import httpx
+from argos.logging import logger
+
+from argos.schemas import Task
+from pydantic import BaseModel, Field
+from typing import Type
+
+
+class BaseExpectedValue(BaseModel):
+    expected: str
+
+    def get_converted(self):
+        return self.expected
+
+
+class ExpectedIntValue(BaseExpectedValue):
+    def get_converted(self):
+        return int(self.expected)
+
+
+class ExpectedStringValue(BaseExpectedValue):
+    pass
+
+
+class BaseCheck:
+    config: str
+    expected_cls : Type[BaseExpectedValue] = None
+
+    def __init__(self, client: httpx.AsyncClient, task: Task):
+        self.client = client
+        self.task = task
+
+    @property
+    def expected(self):
+        return self.expected_cls(expected=self.task.expected).get_converted()
+
+
+class HTTPStatusCheck(BaseCheck):
     config = "status-is"
+    expected_cls = ExpectedIntValue
 
-    def check(resp):
-        return True
+    async def run(self):
+        # XXX Get the method from the task
+        task = self.task
+        response = await self.client.request(method="get", url=task.url)
+        logger.error(f"{response.status_code=}, {self.expected=}")
+        return response.status_code == self.expected
 
 
-class HTTPBodyContains:
+class HTTPBodyContains(BaseCheck):
     config = "body-contains"
+    expected_cls = ExpectedStringValue
 
-    def check(resp):
+    async def run(self):
         return True
 
 
-class SSLCertificateExpiration:
+class SSLCertificateExpiration(BaseCheck):
     config = "ssl-certificate-expiration"
+    expected_cls = ExpectedStringValue
 
-    def check(resp):
+    async def run(self):
         return True
 
 
 AVAILABLE_CHECKS = (HTTPStatusCheck, HTTPBodyContains, SSLCertificateExpiration)
 
 
 class CheckNotFound(Exception):
     pass
+
+
+def get_names(checks=AVAILABLE_CHECKS):
+    return [c.config for c in checks]
+
+
+def get_check_by_name(name, checks=AVAILABLE_CHECKS):
+    checks_dict = {c.config: c for c in checks}
+    check = checks_dict.get(name)
+    if not check:
+        raise CheckNotFound(name)
+    return check
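For orientation, here is a minimal sketch of how the check registry above (presumably `argos.checks`, whose file header was not captured in this view) is meant to be driven, mirroring what the client CLI below does. The task values and the URL are made up for the example.

```python
# Illustrative sketch: driving one check by hand.
import asyncio

import httpx

from argos.checks import get_check_by_name
from argos.schemas import Task


async def demo():
    task = Task(
        id=1,
        url="https://example.org/",
        domain="https://example.org",
        check="status-is",
        expected="200",
        selected_at=None,
        selected_by=None,
    )
    async with httpx.AsyncClient() as client:
        check = get_check_by_name(task.check)(client, task)
        print(await check.run())  # True when the page answers with HTTP 200


asyncio.run(demo())
```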
argos/client/__init__.py (new, empty file)
argos/client/cli.py (new file, 69 lines)

@@ -0,0 +1,69 @@
+import httpx
+import asyncio
+import click
+from typing import List
+
+from argos import logging
+from argos.logging import logger
+from argos.checks import CheckNotFound, get_check_by_name
+
+from argos.schemas import Task
+
+
+async def complete_task(client: httpx.AsyncClient, task: dict) -> dict:
+    task = Task(**task)
+    check_class = get_check_by_name(task.check)
+    check = check_class(client, task)
+    result = await check.run()
+    logger.error(f"{result=}")
+    return {"id": task.id, "result": "completed"}
+
+
+async def post_results(client: httpx.AsyncClient, server: str, results: List[dict]):
+    response = await client.post(f"{server}/results", json={"results": results})
+
+    if response.status_code == httpx.codes.OK:
+        logger.info("Successfully posted results")
+    else:
+        logger.error(f"Failed to post results: {response.read()}")
+
+
+async def run(server: str, max_tasks: int):
+    tasks = []
+
+    async with httpx.AsyncClient() as client:
+        # Fetch the list of tasks
+        response = await client.get(f"{server}/tasks")
+
+        if response.status_code == httpx.codes.OK:
+            # XXX Maybe we want to group the tests by URL ? (to issue one request per URL)
+            data = response.json()
+            logger.info(f"Received {len(data)} tasks from the server")
+
+            for task in data:
+                tasks.append(complete_task(client, task))
+
+            # Run up to max_tasks concurrent tasks
+            results = await asyncio.gather(*tasks)
+
+            # Post the results
+            await post_results(client, server, results)
+        else:
+            logger.error(f"Failed to fetch tasks: {response.read()}")
+
+
+@click.command()
+@click.option("--server", required=True, help="Server URL")
+@click.option("--max-tasks", default=10, help="Maximum number of concurrent tasks")
+@click.option(
+    "--log-level",
+    default="INFO",
+    type=click.Choice(logging.LOG_LEVELS, case_sensitive=False),
+)
+def main(server, max_tasks, log_level):
+    logging.set_log_level(log_level)
+    asyncio.run(run(server, max_tasks))
+
+
+if __name__ == "__main__":
+    main()
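One thing worth noting: `run()` above gathers every received task at once, so `--max-tasks` is not enforced yet. Below is a sketch, not part of the commit, of one way to bound concurrency with a semaphore while reusing the helpers from this file.

```python
# Sketch: at most max_tasks checks run at the same time.
import asyncio

import httpx

from argos.client.cli import complete_task, post_results


async def run_bounded(server: str, max_tasks: int):
    semaphore = asyncio.Semaphore(max_tasks)

    async def bounded(client, task):
        async with semaphore:  # limits the number of checks in flight
            return await complete_task(client, task)

    async with httpx.AsyncClient() as client:
        data = (await client.get(f"{server}/tasks")).json()
        results = await asyncio.gather(*(bounded(client, task) for task in data))
        await post_results(client, server, results)
```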
@@ -1,3 +1,14 @@
 import logging
 
+LOG_LEVELS = ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']
+
+# XXX We probably want different loggers for client and server.
 logger = logging.getLogger(__name__)
+
+# XXX Does not work ?
+def set_log_level(log_level):
+    level = getattr(logging, log_level.upper(), None)
+    if not isinstance(level, int):
+        raise ValueError(f"Invalid log level: {log_level}")
+    logger.setLevel(level=level)
+    logger.info("Log level set to {}".format(log_level))
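A possible explanation for the `# XXX Does not work ?` note: setting a level on a logger that has no handler (and with no `logging.basicConfig()` anywhere) leaves output at the default WARNING-and-above behaviour, so INFO and DEBUG messages stay invisible. A sketch of a variant that would make the level change visible, not part of the commit:

```python
# Possible fix sketch: install a handler via basicConfig before adjusting
# the level, so INFO/DEBUG records are actually emitted.
import logging

logger = logging.getLogger("argos")


def set_log_level(log_level: str):
    level = getattr(logging, log_level.upper(), None)
    if not isinstance(level, int):
        raise ValueError(f"Invalid log level: {log_level}")
    logging.basicConfig(level=level)  # attaches a stream handler to the root logger
    logger.setLevel(level)
    logger.info("Log level set to %s", log_level)
```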
@@ -7,7 +7,8 @@ import yaml
 from pydantic import BaseModel, Field, HttpUrl, validator
 
 from datetime import datetime
-from argos.checks import get_names as get_check_names
+# from argos.checks import get_names as get_check_names
+# XXX Find a way to check without having cirular imports
 
 # This file contains the pydantic schemas. For the database models, check in argos.model.
 
@@ -21,7 +22,8 @@ class SSL(BaseModel):
     thresholds: Thresholds
 
 
-WebsiteCheck = dict[StrEnum("Check", get_check_names()), str | int]
+WebsiteCheck = dict[str, str | int]
+# StrEnum("Check", get_check_names()) ?
 
 
 class WebsitePath(BaseModel):
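The `# XXX ... cirular imports` note above is about validating check names against `argos.checks` without importing it at module load time. One hypothetical way out is a lazy import inside a validator; the `WebsitePath` field layout below is guessed for illustration and is not the commit's definition.

```python
# Hypothetical sketch only: validate check names lazily to avoid the
# circular import between the schemas and argos.checks.
from pydantic import BaseModel, field_validator


class WebsitePath(BaseModel):
    path: str
    checks: list[dict[str, str | int]]

    @field_validator("checks")
    @classmethod
    def check_names_are_known(cls, value):
        from argos.checks import get_names  # imported late, on purpose

        known = set(get_names())
        for check in value:
            for name in check:
                if name not in known:
                    raise ValueError(f"Unknown check: {name}")
        return value
```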
@@ -1,15 +1,16 @@
 from pydantic import BaseModel
 from datetime import datetime
 
+# XXX Refactor using SQLModel to avoid duplication of model data
+
 class Task(BaseModel):
     id : int
-    max_delta_days: int
-    status: str
-    response: dict
-    last_check: datetime | None
-    selected_by: str | None
     url: str
     domain: str
+    check: str
+    expected: str
+    selected_at: datetime | None
+    selected_by : str | None
 
     class Config:
         from_attributes = True
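`from_attributes = True` is what lets FastAPI's `response_model` build this schema straight from the SQLAlchemy rows. A minimal illustration of the same mechanism used by hand (a sketch, not part of the commit):

```python
# Sketch: converting a SQLAlchemy Task row into the pydantic Task schema.
from argos.schemas import Task as TaskSchema
from argos.server.models import Task as TaskModel


def to_schema(row: TaskModel) -> TaskSchema:
    # Reads attributes from the ORM object instead of expecting a dict.
    return TaskSchema.model_validate(row, from_attributes=True)
```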
argos/server/__init__.py (new file, 1 line)

@@ -0,0 +1 @@
+from argos.server.api import app
@@ -1,14 +1,16 @@
 from fastapi import Depends, FastAPI, HTTPException
 from sqlalchemy.orm import Session
 
-from . import queries, models, schemas
-from .database import SessionLocal, engine
-from .logging import logger
+from argos.server import queries, models
+from argos import schemas
+from argos.server.database import SessionLocal, engine
+from argos.logging import logger
 
 models.Base.metadata.create_all(bind=engine)
 
 app = FastAPI()
 
+
 # Dependency
 def get_db():
     db = SessionLocal()
@@ -17,6 +19,7 @@ def get_db():
     finally:
         db.close()
 
+
 @app.on_event("startup")
 async def read_config_and_populate_db():
     # XXX Get filename from environment.
@@ -29,7 +32,7 @@ async def read_config_and_populate_db():
         db.close()
 
 
-@app.get("/", response_model=list[schemas.Task])
+@app.get("/tasks", response_model=list[schemas.Task])
 async def read_tasks(skip: int = 0, limit: int = 100, db: Session = Depends(get_db)):
     users = queries.list_tasks(db, limit)
     return users
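With the route moved to `/tasks`, a quick manual check of the API could look like this. It assumes the server from the README is running locally on port 8000; `limit` is the query parameter declared above.

```python
# Sketch: poking the new /tasks endpoint by hand.
import httpx

response = httpx.get("http://localhost:8000/tasks", params={"limit": 10})
response.raise_for_status()
for task in response.json():
    print(task["id"], task["url"], task["check"], task["expected"])
```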
@@ -6,7 +6,7 @@ from sqlalchemy.orm import mapped_column
 
 from datetime import datetime
 
-from .schemas import WebsiteCheck
+from argos.schemas import WebsiteCheck
 
 
 class Base(DeclarativeBase):
@@ -1,14 +1,15 @@
 from sqlalchemy.orm import Session
 from sqlalchemy import exists
 
-from . import schemas
-from .models import Task
-from .logging import logger
+from argos import schemas
+from argos.logging import logger
+from argos.server.models import Task
+
 from urllib.parse import urljoin
 
 
 def list_tasks(db: Session, limit: int = 100):
-    return db.query(models.Task).limit(limit).all()
+    return db.query(Task).limit(limit).all()
 
 
 def update_from_config(db: Session, config: schemas.Config):
@@ -18,23 +19,24 @@ def update_from_config(db: Session, config: schemas.Config):
             url = urljoin(domain, str(p.path))
             for check in p.checks:
                 for check_key, expected in check.items():
-                    # Check the db for already existing tasks.
-
-                    existing_task = db.query(exists().where(
-                        Task.url == url
-                        and Task.check == check_key
-                        and Task.expected == expected
-                    )).scalar()
+                    # Check the db for already existing tasks.
+
+                    existing_task = db.query(
+                        exists().where(
+                            Task.url == url
+                            and Task.check == check_key
+                            and Task.expected == expected
+                        )
+                    ).scalar()
 
                     if not existing_task:
                         task = Task(
-                            domain = domain,
-                            url = url,
-                            check = check_key,
-                            expected = expected
+                            domain=domain, url=url, check=check_key, expected=expected
                         )
                         logger.debug(f"Adding a new task in the db: {task=}")
                         db.add(task)
                     else:
-                        logger.debug(f"Skipping db task creation for {url=}, {check_key=}, {expected=}.")
+                        logger.debug(
+                            f"Skipping db task creation for {url=}, {check_key=}, {expected=}."
+                        )
     db.commit()
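A side note on the existence check above: chaining SQLAlchemy column comparisons with Python's `and` does not produce a SQL `AND`; the expression most likely collapses to the first comparison only. Below is a sketch of the same check with an explicit `and_()`, not part of the commit.

```python
# Sketch: the same "does this task already exist?" query with an explicit
# SQL AND, so all three columns actually take part in the filter.
from sqlalchemy import and_, exists
from sqlalchemy.orm import Session

from argos.server.models import Task


def task_exists(db: Session, url: str, check_key: str, expected: str) -> bool:
    return db.query(
        exists().where(
            and_(
                Task.url == url,
                Task.check == check_key,
                Task.expected == expected,
            )
        )
    ).scalar()
```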