mirror of
https://framagit.org/framasoft/framaspace/argos.git
synced 2025-04-28 18:02:41 +02:00
Compare commits
155 commits
Author | SHA1 | Date | |
---|---|---|---|
![]() |
9389e3a005 | ||
![]() |
159a6e2427 | ||
![]() |
211ac32028 | ||
![]() |
32f2518294 | ||
![]() |
38cc06e972 | ||
![]() |
4b78919937 | ||
![]() |
d8f30ebccd | ||
![]() |
09674f73ef | ||
![]() |
c63093bb2f | ||
![]() |
657624ed35 | ||
![]() |
471c1eae91 | ||
![]() |
c3708af32a | ||
![]() |
23fea9fffa | ||
![]() |
a48c7b74e6 | ||
![]() |
8d82f7f9d6 | ||
![]() |
fd0c68cd4c | ||
![]() |
c98cd9c017 | ||
![]() |
73e7a8f414 | ||
![]() |
db54dd2cdd | ||
![]() |
1b484da27a | ||
![]() |
07f87a0f7d | ||
![]() |
60f3079140 | ||
![]() |
ca709dca62 | ||
![]() |
0f099b9df4 | ||
![]() |
5abdd8414d | ||
![]() |
06868cdd74 | ||
![]() |
2b82f7c8f2 | ||
![]() |
797a60a85c | ||
![]() |
4c4d3b69b2 | ||
![]() |
c922894567 | ||
![]() |
8652539086 | ||
![]() |
4f3dfd994b | ||
![]() |
28ec85fed3 | ||
![]() |
586660c02a | ||
![]() |
64f8241e74 | ||
![]() |
3d209fed22 | ||
![]() |
acd90133bd | ||
![]() |
be90aa095a | ||
![]() |
06f8310505 | ||
![]() |
fe89d62e88 | ||
![]() |
1e7672abca | ||
![]() |
2ef999fa63 | ||
![]() |
9c8be94c20 | ||
![]() |
311d86d130 | ||
![]() |
e0edb50e12 | ||
![]() |
ea23ea7c1f | ||
![]() |
a1600cb08e | ||
![]() |
0da1f4986e | ||
![]() |
1853b4fead | ||
![]() |
bb4db3ca84 | ||
![]() |
7d21d8d271 | ||
![]() |
868e91b866 | ||
![]() |
ffd24173e5 | ||
![]() |
594fbd6881 | ||
![]() |
04e33a8d24 | ||
![]() |
da221b856b | ||
![]() |
841f8638de | ||
![]() |
5b999184d0 | ||
![]() |
0563cf185a | ||
![]() |
91a9b27106 | ||
![]() |
4117f9f628 | ||
![]() |
8ac2519398 | ||
![]() |
d3766a79c6 | ||
![]() |
759fa05417 | ||
![]() |
a31c12e037 | ||
![]() |
04bbe21a66 | ||
![]() |
fdc219ba5c | ||
![]() |
d3b5a754dd | ||
![]() |
37bd7b0d8d | ||
![]() |
0058e05f15 | ||
![]() |
0ed60508e9 | ||
![]() |
db4f045adf | ||
![]() |
100171356b | ||
![]() |
175f605e35 | ||
![]() |
7c822b10c0 | ||
![]() |
3dd1b3f36f | ||
![]() |
89f4590fb7 | ||
![]() |
839429f460 | ||
![]() |
3b83e4f3e3 | ||
![]() |
c62bf82e0d | ||
![]() |
2c5420cc9d | ||
![]() |
269e551502 | ||
![]() |
3a3c5852d0 | ||
![]() |
6c3c44f5be | ||
![]() |
7998333fc1 | ||
![]() |
3917eb2498 | ||
![]() |
8072a485a1 | ||
![]() |
255fa77ac3 | ||
![]() |
4b78d9ddda | ||
![]() |
7c485a4ad9 | ||
![]() |
5f43f252b4 | ||
![]() |
575fe2ad22 | ||
![]() |
dec6c72238 | ||
![]() |
261f843b46 | ||
![]() |
9b40c5a675 | ||
![]() |
67162f6ce4 | ||
![]() |
1c6abce9b9 | ||
![]() |
353d12240f | ||
![]() |
d2468eff6e | ||
![]() |
95c49c5924 | ||
![]() |
bc3bc52ed0 | ||
![]() |
282f5147a5 | ||
9dc0ffc5ef | |||
![]() |
eb65470935 | ||
![]() |
aac7ca4ec5 | ||
![]() |
a25cfea8c0 | ||
c419133eec | |||
![]() |
7eede341e4 | ||
![]() |
7e5502f7a4 | ||
![]() |
b904f4c35d | ||
![]() |
ef1eb6ed6e | ||
![]() |
77dbc8bb3a | ||
![]() |
fde061da19 | ||
![]() |
9078a1384b | ||
![]() |
5bd4d9909a | ||
![]() |
3b49594bef | ||
![]() |
9102d5f974 | ||
![]() |
512098760a | ||
![]() |
636779cb79 | ||
![]() |
db50aceddb | ||
![]() |
058622556d | ||
![]() |
1c314c4460 | ||
![]() |
4fcf0e282e | ||
![]() |
5bd187f135 | ||
![]() |
98f2ce6f63 | ||
![]() |
b85d7e90a8 | ||
![]() |
1b073376e6 | ||
![]() |
09a858794d | ||
![]() |
29d839dc4d | ||
![]() |
638dcc0295 | ||
![]() |
907cd5878f | ||
![]() |
f848748999 | ||
![]() |
2380c9be7d | ||
![]() |
e2402ac190 | ||
![]() |
6f17fafedb | ||
![]() |
9cffb9d96e | ||
![]() |
20d3371c3c | ||
![]() |
e363e6be4a | ||
![]() |
4f6207509b | ||
![]() |
01ffcc2d28 | ||
![]() |
d863700ecf | ||
![]() |
609b6ad580 | ||
![]() |
f52dd5dd8a | ||
![]() |
cb0a638545 | ||
![]() |
b9e2e62055 | ||
![]() |
4e203d5870 | ||
![]() |
be492ed2ee | ||
![]() |
7bfe676b5c | ||
![]() |
05a8a7bd2e | ||
![]() |
4eb802a48b | ||
![]() |
3ce293b5aa | ||
![]() |
4d806e11aa | ||
![]() |
415c37bdb4 | ||
![]() |
76614a2a50 | ||
![]() |
1457373315 |
87 changed files with 4812 additions and 988 deletions
1
.gitignore
vendored
1
.gitignore
vendored
|
@ -5,5 +5,6 @@ venv
|
||||||
.env
|
.env
|
||||||
public
|
public
|
||||||
*.swp
|
*.swp
|
||||||
|
argos-config.yaml
|
||||||
config.yaml
|
config.yaml
|
||||||
dist
|
dist
|
||||||
|
|
|
@ -1,3 +1,4 @@
|
||||||
|
---
|
||||||
image: python:3.11
|
image: python:3.11
|
||||||
|
|
||||||
stages:
|
stages:
|
||||||
|
@ -18,6 +19,9 @@ default:
|
||||||
|
|
||||||
install:
|
install:
|
||||||
stage: install
|
stage: install
|
||||||
|
before_script:
|
||||||
|
- apt-get update
|
||||||
|
- apt-get install -y build-essential libldap-dev libsasl2-dev
|
||||||
script:
|
script:
|
||||||
- make venv
|
- make venv
|
||||||
- make develop
|
- make develop
|
||||||
|
@ -37,6 +41,12 @@ djlint:
|
||||||
script:
|
script:
|
||||||
- make djlint
|
- make djlint
|
||||||
|
|
||||||
|
mypy:
|
||||||
|
<<: *pull_cache
|
||||||
|
stage: test
|
||||||
|
script:
|
||||||
|
- make mypy
|
||||||
|
|
||||||
pylint:
|
pylint:
|
||||||
<<: *pull_cache
|
<<: *pull_cache
|
||||||
stage: test
|
stage: test
|
||||||
|
@ -51,13 +61,28 @@ format:
|
||||||
script:
|
script:
|
||||||
- make ruff
|
- make ruff
|
||||||
|
|
||||||
|
release_job:
|
||||||
|
stage: deploy
|
||||||
|
image: registry.gitlab.com/gitlab-org/release-cli:latest
|
||||||
|
rules:
|
||||||
|
- if: $CI_COMMIT_TAG
|
||||||
|
script:
|
||||||
|
- sed -n '/^## '$CI_COMMIT_TAG'/,/^#/p' CHANGELOG.md | sed -e '/^\(#\|$\|Date\)/d' > release.md
|
||||||
|
release: # See https://docs.gitlab.com/ee/ci/yaml/#release for available properties
|
||||||
|
tag_name: '$CI_COMMIT_TAG'
|
||||||
|
description: './release.md'
|
||||||
|
assets:
|
||||||
|
links:
|
||||||
|
- name: 'PyPI page'
|
||||||
|
url: 'https://pypi.org/project/argos-monitoring/$CI_COMMIT_TAG/'
|
||||||
|
|
||||||
pages:
|
pages:
|
||||||
<<: *pull_cache
|
<<: *pull_cache
|
||||||
stage: deploy
|
stage: deploy
|
||||||
script:
|
script:
|
||||||
- pwd
|
- sed -e "/Unreleased/,+1d" -i CHANGELOG.md
|
||||||
- ls
|
|
||||||
- make docs
|
- make docs
|
||||||
|
- echo "https://framasoft.frama.io/framaspace/argos/* https://argos-monitoring.framasoft.org/:splat 301" > public/_redirects
|
||||||
artifacts:
|
artifacts:
|
||||||
paths:
|
paths:
|
||||||
- public/
|
- public/
|
||||||
|
|
184
CHANGELOG.md
184
CHANGELOG.md
|
@ -2,6 +2,190 @@
|
||||||
|
|
||||||
## [Unreleased]
|
## [Unreleased]
|
||||||
|
|
||||||
|
## 0.9.0
|
||||||
|
|
||||||
|
Date: 2025-02-18
|
||||||
|
|
||||||
|
- 🐛 — Fix worker timeout for old results cleaning in recurring tasks (#84)
|
||||||
|
|
||||||
|
💥 Old results are now removed by their age, not based on their number.
|
||||||
|
|
||||||
|
💥 Warning: `max_results` setting has been replaced by `max_results_age`, which is a duration.
|
||||||
|
Use `argos server generate-config > /etc/argos/config.yaml-dist` to generate
|
||||||
|
a new example configuration file.
|
||||||
|
|
||||||
|
## 0.8.2
|
||||||
|
|
||||||
|
Date: 2025-02-18
|
||||||
|
|
||||||
|
- 🐛 — Fix recurring tasks with gunicorn
|
||||||
|
|
||||||
|
## 0.8.1
|
||||||
|
|
||||||
|
Date: 2025-02-18
|
||||||
|
|
||||||
|
- 🐛 — Fix todo enum in jobs table
|
||||||
|
|
||||||
|
## 0.8.0
|
||||||
|
|
||||||
|
Date: 2025-02-18
|
||||||
|
|
||||||
|
- ✨ — Allow to customize agent User-Agent header (#78)
|
||||||
|
- 📝 — Document how to add data to requests (#77)
|
||||||
|
- ✨ — No need cron tasks for DB cleaning anymore (#74 and #75)
|
||||||
|
- ✨ — No need cron tasks for agents watching (#76)
|
||||||
|
- ✨ — Reload configuration asynchronously (#79)
|
||||||
|
- 🐛 — Automatically reconnect to LDAP if unreachable (#81)
|
||||||
|
- 🐛 — Better httpx.RequestError handling (#83)
|
||||||
|
|
||||||
|
💥 Warning: there is new settings to add to your configuration file.
|
||||||
|
Use `argos server generate-config > /etc/argos/config.yaml-dist` to generate
|
||||||
|
a new example configuration file.
|
||||||
|
|
||||||
|
💥 You don’t need cron tasks anymore!
|
||||||
|
Remove your old cron tasks as they will now do nothing but generating errors.
|
||||||
|
|
||||||
|
NB: You may want to add `--enqueue` to `reload-config` command in your systemd file.
|
||||||
|
|
||||||
|
## 0.7.4
|
||||||
|
|
||||||
|
Date: 2025-02-12
|
||||||
|
|
||||||
|
- 🐛 — Fix method enum in tasks table (thx to Dryusdan)
|
||||||
|
|
||||||
|
## 0.7.3
|
||||||
|
|
||||||
|
Date: 2025-01-26
|
||||||
|
|
||||||
|
- 🐛 — Fix bug in retry_before_notification logic when success
|
||||||
|
|
||||||
|
## 0.7.2
|
||||||
|
|
||||||
|
Date: 2025-01-24
|
||||||
|
|
||||||
|
- 🐛 — Fix bug in retry_before_notification logic
|
||||||
|
|
||||||
|
## 0.7.1
|
||||||
|
|
||||||
|
Date: 2025-01-15
|
||||||
|
|
||||||
|
- 🩹 — Avoid warning from MySQL only alembic instructions
|
||||||
|
- 🩹 — Check before adding/removing ip_version_enum
|
||||||
|
- 📝 — Improve release documentation
|
||||||
|
|
||||||
|
|
||||||
|
## 0.7.0
|
||||||
|
|
||||||
|
Date: 2025-01-14
|
||||||
|
|
||||||
|
- ✨ — IPv4/IPv6 choice for checks, and choice for a dual-stack check (#69)
|
||||||
|
- ⚡ — Mutualize check requests (#68)
|
||||||
|
- ✨ — Ability to delay notification after X failures (#71)
|
||||||
|
- 🐛 — Fix bug when changing IP version not removing tasks (#72)
|
||||||
|
- ✨ — Allow to specify form data and headers for checks (#70)
|
||||||
|
- 🚸 — Add a long expiration date on auto-refresh cookies
|
||||||
|
- 🗃️ — Use bigint type for results id column in PostgreSQL (#73)
|
||||||
|
|
||||||
|
## 0.6.1
|
||||||
|
|
||||||
|
Date: 2024-11-28
|
||||||
|
|
||||||
|
- 🐛 - Fix database migrations without default values
|
||||||
|
- 🐛 - Fix domain status selector’s bug on page refresh
|
||||||
|
|
||||||
|
## 0.6.0
|
||||||
|
|
||||||
|
Date: 2024-11-28
|
||||||
|
|
||||||
|
- 💄 — Show only not-OK domains by default in domains list, to reduce the load on browser
|
||||||
|
- ♿️ — Fix not-OK domains display if javascript is disabled
|
||||||
|
- ✨ — Retry check right after a httpx.ReadError
|
||||||
|
- ✨ — The HTTP method used by checks is now configurable
|
||||||
|
- ♻️ — Refactor some agent code
|
||||||
|
- 💄 — Filter form on domains list (#66)
|
||||||
|
- ✨ — Add "Remember me" checkbox on login (#65)
|
||||||
|
- ✨ — Add a setting to set a reschedule delay if check failed (#67)
|
||||||
|
BREAKING CHANGE: `mo` is no longer accepted for declaring a duration in month in the configuration
|
||||||
|
You need to use `M`, `month` or `months`
|
||||||
|
- ✨ - Allow to choose a frequency smaller than a minute
|
||||||
|
- ✨🛂 — Allow partial or total anonymous access to web interface (#63)
|
||||||
|
- ✨🛂 — Allow to use a LDAP server for authentication (#64)
|
||||||
|
|
||||||
|
## 0.5.0
|
||||||
|
|
||||||
|
Date: 2024-09-26
|
||||||
|
|
||||||
|
- 💄 — Correctly show results on small screens
|
||||||
|
- 📝💄 — Add opengraph tags to documentation site (#62)
|
||||||
|
- 🔨 — Add a small web server to browse documentation when developing
|
||||||
|
- ✨ — Add new check type: http-to-https (#61)
|
||||||
|
- 👷 — Remove Unreleased section from CHANGELOG when publishing documentation
|
||||||
|
- 🩹 — Severity of ssl-certificate-expiration’s errors is now UNKNOWN (#60)
|
||||||
|
- 💄 — Better display of results’ error details
|
||||||
|
|
||||||
|
## 0.4.1
|
||||||
|
|
||||||
|
Date: 2024-09-18
|
||||||
|
|
||||||
|
- 💄 — Use a custom User-Agent header
|
||||||
|
- 🐛 — Fix mail and gotify alerting
|
||||||
|
|
||||||
|
## 0.4.0
|
||||||
|
|
||||||
|
Date: 2024-09-04
|
||||||
|
|
||||||
|
- 💄 — Improve email and gotify notifications
|
||||||
|
- ✨ — Add command to test gotify configuration
|
||||||
|
- ✨ — Add nagios command to use as a Nagios probe
|
||||||
|
- ✨ — Add Apprise as notification way (#50)
|
||||||
|
|
||||||
|
## 0.3.1
|
||||||
|
|
||||||
|
Date: 2024-09-02
|
||||||
|
|
||||||
|
- ✨ — Add new check types: body-like, headers-like and json-like (#58)
|
||||||
|
|
||||||
|
## 0.3.0
|
||||||
|
|
||||||
|
Date: 2024-09-02
|
||||||
|
|
||||||
|
- 🩹 — Fix release documentation
|
||||||
|
- ✅ — Add mypy test
|
||||||
|
- ✨ — Add new check type: status-in
|
||||||
|
- 🩹 — Close menu after rescheduling non-ok checks (#55)
|
||||||
|
- ✨ — Add new check types: headers-contain and headers-have (#56)
|
||||||
|
- ✨ — Add command to test email configuration (!66)
|
||||||
|
- 💄 — Enhance the mobile view (!67)
|
||||||
|
- ✨ — Allow to run Argos in a subfolder (i.e. not on /) (#59)
|
||||||
|
- ✨ — Add new check types: json-contains, json-has and json-is (#57)
|
||||||
|
|
||||||
|
## 0.2.2
|
||||||
|
|
||||||
|
Date: 2024-07-04
|
||||||
|
|
||||||
|
- 🐛 — Fix bug in login view when having an expired token in cookie (redirect loop)
|
||||||
|
|
||||||
|
## 0.2.1
|
||||||
|
|
||||||
|
Date: 2024-06-27
|
||||||
|
|
||||||
|
- 🐛 — Fix bug in login view when having a cookie (internal server error)
|
||||||
|
|
||||||
|
## 0.2.0
|
||||||
|
|
||||||
|
Date: 2024-06-24
|
||||||
|
|
||||||
|
- 💄📯 — Improve notifications and result(s) pages
|
||||||
|
- 🔊 — Add level of log before the log message
|
||||||
|
- 🔊 — Add a warning message in the logs if there is no tasks in database. (fix #41)
|
||||||
|
- ✨ — Add command to generate example configuration (fix #38)
|
||||||
|
- 📝 — Improve documentation
|
||||||
|
- ✨ — Add command to warn if it’s been long since last viewing an agent (fix #49)
|
||||||
|
- 💥 — Change default config file path to argos-config.yaml (fix #36)
|
||||||
|
- 📝 — New documentation URL: doc is now on https://argos-monitoring.framasoft.org/
|
||||||
|
- 💥 — Remove env vars and only use the configuration file
|
||||||
|
- ✨ — Add built-in authentication for human interface
|
||||||
|
|
||||||
## 0.1.1
|
## 0.1.1
|
||||||
|
|
||||||
Date: 2024-04-30
|
Date: 2024-04-30
|
||||||
|
|
12
Makefile
12
Makefile
|
@ -5,17 +5,19 @@ ORANGE=\033[0;33m
|
||||||
BLUE=\033[0;34m
|
BLUE=\033[0;34m
|
||||||
NC=\033[0m # No Color
|
NC=\033[0m # No Color
|
||||||
|
|
||||||
.PHONY: test lint djlint pylint ruff
|
.PHONY: test lint djlint pylint ruff mypy
|
||||||
|
|
||||||
venv: ## Create the venv
|
venv: ## Create the venv
|
||||||
python3 -m venv venv
|
python3 -m venv venv
|
||||||
develop: venv ## Install the dev dependencies
|
develop: venv ## Install the dev dependencies
|
||||||
venv/bin/pip install -e ".[dev,docs]"
|
venv/bin/pip install -e ".[dev,docs,ldap]"
|
||||||
docs: cog ## Build the docs
|
docs: cog ## Build the docs
|
||||||
venv/bin/sphinx-build docs public
|
venv/bin/sphinx-build docs public
|
||||||
if [ ! -e "public/mermaid.min.js" ]; then curl -sL $$(grep mermaid.min.js public/search.html | cut -f 2 -d '"') --output public/mermaid.min.js; fi
|
if [ ! -e "public/mermaid.min.js" ]; then curl -sL $$(grep mermaid.min.js public/search.html | cut -f 2 -d '"') --output public/mermaid.min.js; fi
|
||||||
sed -e 's@https://unpkg.com/mermaid[^"]*"@mermaid.min.js"@' -i public/search.html public/genindex.html
|
sed -e 's@https://unpkg.com/mermaid[^"]*"@mermaid.min.js"@' -i public/search.html public/genindex.html
|
||||||
sed -e 's@https://unpkg.com/mermaid[^"]*"@../mermaid.min.js"@' -i public/developer/models.html public/developer/overview.html
|
sed -e 's@https://unpkg.com/mermaid[^"]*"@../mermaid.min.js"@' -i public/developer/models.html public/developer/overview.html
|
||||||
|
docs-webserver: docs
|
||||||
|
python3 -m http.server -d public -b 127.0.0.1 8001
|
||||||
cog: ## Run cog, to integrate the CLI options to the docs.
|
cog: ## Run cog, to integrate the CLI options to the docs.
|
||||||
venv/bin/cog -r docs/*.md
|
venv/bin/cog -r docs/*.md
|
||||||
test: venv ## Run the tests
|
test: venv ## Run the tests
|
||||||
|
@ -25,10 +27,12 @@ ruff: venv
|
||||||
ruff-format: venv
|
ruff-format: venv
|
||||||
venv/bin/ruff format .
|
venv/bin/ruff format .
|
||||||
djlint: venv ## Format the templates
|
djlint: venv ## Format the templates
|
||||||
venv/bin/djlint --ignore=H030,H031,H006 --profile jinja --lint argos/server/templates/*html
|
venv/bin/djlint --ignore=H006 --profile jinja --lint argos/server/templates/*html
|
||||||
pylint: venv ## Runs pylint on the code
|
pylint: venv ## Runs pylint on the code
|
||||||
venv/bin/pylint argos
|
venv/bin/pylint argos
|
||||||
lint: djlint pylint ruff
|
mypy: venv
|
||||||
|
venv/bin/mypy argos tests
|
||||||
|
lint: djlint pylint mypy ruff
|
||||||
help:
|
help:
|
||||||
@python3 -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST)
|
@python3 -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST)
|
||||||
|
|
||||||
|
|
|
@ -2,13 +2,15 @@
|
||||||
|
|
||||||
A monitoring and status board for your websites.
|
A monitoring and status board for your websites.
|
||||||
|
|
||||||
|

|
||||||
|
|
||||||
1. Define a list of websites to monitor
|
1. Define a list of websites to monitor
|
||||||
2. Specify a list of checks to run on these websites.
|
2. Specify a list of checks to run on these websites.
|
||||||
3. Argos will run the checks periodically and alert you if something goes wrong.
|
3. Argos will run the checks periodically and alert you if something goes wrong.
|
||||||
|
|
||||||
Internally, a HTTP API is exposed, and a job queue is used to distribute the checks to the agents.
|
Internally, a HTTP API is exposed, and a job queue is used to distribute the checks to the agents.
|
||||||
|
|
||||||
- [Online documentation](http://framasoft.frama.io/framaspace/argos)
|
- [Online documentation](https://argos-monitoring.framasoft.org/)
|
||||||
- [Issue tracker](https://framagit.org/framasoft/framaspace/argos/-/issues)
|
- [Issue tracker](https://framagit.org/framasoft/framaspace/argos/-/issues)
|
||||||
|
|
||||||
## Requirements
|
## Requirements
|
||||||
|
|
|
@ -1 +1 @@
|
||||||
VERSION = "0.1.1"
|
VERSION = "0.9.0"
|
||||||
|
|
188
argos/agent.py
188
argos/agent.py
|
@ -6,11 +6,14 @@ import asyncio
|
||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
import socket
|
import socket
|
||||||
|
from hashlib import md5
|
||||||
|
from time import sleep
|
||||||
from typing import List
|
from typing import List
|
||||||
|
|
||||||
import httpx
|
import httpx
|
||||||
from tenacity import retry, wait_random # type: ignore
|
from tenacity import retry, wait_random # type: ignore
|
||||||
|
|
||||||
|
from argos import VERSION
|
||||||
from argos.checks import get_registered_check
|
from argos.checks import get_registered_check
|
||||||
from argos.logging import logger
|
from argos.logging import logger
|
||||||
from argos.schemas import AgentResult, SerializableException, Task
|
from argos.schemas import AgentResult, SerializableException, Task
|
||||||
|
@ -31,46 +34,139 @@ def log_failure(retry_state):
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class ArgosAgent:
|
class ArgosAgent: # pylint: disable-msg=too-many-instance-attributes
|
||||||
"""The Argos agent is responsible for running the checks and reporting the results."""
|
"""The Argos agent is responsible for running the checks and reporting the results."""
|
||||||
|
|
||||||
def __init__(self, server: str, auth: str, max_tasks: int, wait_time: int):
|
def __init__( # pylint: disable-msg=too-many-positional-arguments
|
||||||
|
self, server: str, auth: str, max_tasks: int, wait_time: int, user_agent: str
|
||||||
|
):
|
||||||
self.server = server
|
self.server = server
|
||||||
self.max_tasks = max_tasks
|
self.max_tasks = max_tasks
|
||||||
self.wait_time = wait_time
|
self.wait_time = wait_time
|
||||||
self.auth = auth
|
self.auth = auth
|
||||||
self._http_client = None
|
if user_agent == "":
|
||||||
|
self.ua = user_agent
|
||||||
|
else:
|
||||||
|
self.ua = f" - {user_agent}"
|
||||||
|
self._http_client: httpx.AsyncClient | None = None
|
||||||
|
self._http_client_v4: httpx.AsyncClient | None = None
|
||||||
|
self._http_client_v6: httpx.AsyncClient | None = None
|
||||||
|
self._res_cache: dict[str, httpx.Response] = {}
|
||||||
|
|
||||||
self.agent_id = socket.gethostname()
|
self.agent_id = socket.gethostname()
|
||||||
|
|
||||||
@retry(after=log_failure, wait=wait_random(min=1, max=2))
|
@retry(after=log_failure, wait=wait_random(min=1, max=2))
|
||||||
async def run(self):
|
async def run(self):
|
||||||
headers = {
|
auth_header = {
|
||||||
"Authorization": f"Bearer {self.auth}",
|
"Authorization": f"Bearer {self.auth}",
|
||||||
|
"User-Agent": f"Argos Panoptes agent {VERSION}{self.ua}",
|
||||||
}
|
}
|
||||||
self._http_client = httpx.AsyncClient(headers=headers)
|
self._http_client = httpx.AsyncClient(headers=auth_header)
|
||||||
|
|
||||||
|
ua_header = {
|
||||||
|
"User-Agent": f"Argos Panoptes {VERSION} "
|
||||||
|
f"(about: https://argos-monitoring.framasoft.org/){self.ua}",
|
||||||
|
}
|
||||||
|
self._http_client_v4 = httpx.AsyncClient(
|
||||||
|
headers=ua_header,
|
||||||
|
transport=httpx.AsyncHTTPTransport(local_address="0.0.0.0"),
|
||||||
|
)
|
||||||
|
self._http_client_v6 = httpx.AsyncClient(
|
||||||
|
headers=ua_header, transport=httpx.AsyncHTTPTransport(local_address="::")
|
||||||
|
)
|
||||||
|
|
||||||
logger.info("Running agent against %s", self.server)
|
logger.info("Running agent against %s", self.server)
|
||||||
async with self._http_client:
|
async with self._http_client:
|
||||||
while "forever":
|
while "forever":
|
||||||
retry_now = await self._get_and_complete_tasks()
|
retry_now = await self._get_and_complete_tasks()
|
||||||
if not retry_now:
|
if not retry_now:
|
||||||
logger.error("Waiting %i seconds before next retry", self.wait_time)
|
logger.info("Waiting %i seconds before next retry", self.wait_time)
|
||||||
await asyncio.sleep(self.wait_time)
|
await asyncio.sleep(self.wait_time)
|
||||||
|
|
||||||
async def _complete_task(self, task: dict) -> dict:
|
async def _do_request(self, group: str, details: dict):
|
||||||
try:
|
logger.debug("_do_request for group %s", group)
|
||||||
task = Task(**task)
|
headers = {}
|
||||||
check_class = get_registered_check(task.check)
|
if details["request_data"] is not None:
|
||||||
check = check_class(self._http_client, task)
|
request_data = json.loads(details["request_data"])
|
||||||
result = await check.run()
|
if request_data["headers"] is not None:
|
||||||
status = result.status
|
headers = request_data["headers"]
|
||||||
context = result.context
|
|
||||||
|
|
||||||
|
if details["ip_version"] == "4":
|
||||||
|
http_client = self._http_client_v4
|
||||||
|
else:
|
||||||
|
http_client = self._http_client_v6
|
||||||
|
try:
|
||||||
|
if details["request_data"] is None or request_data["data"] is None:
|
||||||
|
response = await http_client.request( # type: ignore[union-attr]
|
||||||
|
method=details["method"],
|
||||||
|
url=details["url"],
|
||||||
|
headers=headers,
|
||||||
|
timeout=60,
|
||||||
|
)
|
||||||
|
elif request_data["json"]:
|
||||||
|
response = await http_client.request( # type: ignore[union-attr]
|
||||||
|
method=details["method"],
|
||||||
|
url=details["url"],
|
||||||
|
headers=headers,
|
||||||
|
json=request_data["data"],
|
||||||
|
timeout=60,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
response = await http_client.request( # type: ignore[union-attr]
|
||||||
|
method=details["method"],
|
||||||
|
url=details["url"],
|
||||||
|
headers=headers,
|
||||||
|
data=request_data["data"],
|
||||||
|
timeout=60,
|
||||||
|
)
|
||||||
|
except httpx.ReadError:
|
||||||
|
sleep(1)
|
||||||
|
logger.warning("httpx.ReadError for group %s, re-emit request", group)
|
||||||
|
if details["request_data"] is None or request_data["data"] is None:
|
||||||
|
response = await http_client.request( # type: ignore[union-attr]
|
||||||
|
method=details["method"], url=details["url"], timeout=60
|
||||||
|
)
|
||||||
|
elif request_data["json"]:
|
||||||
|
response = await http_client.request( # type: ignore[union-attr]
|
||||||
|
method=details["method"],
|
||||||
|
url=details["url"],
|
||||||
|
json=request_data["data"],
|
||||||
|
timeout=60,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
response = await http_client.request( # type: ignore[union-attr]
|
||||||
|
method=details["method"],
|
||||||
|
url=details["url"],
|
||||||
|
data=request_data["data"],
|
||||||
|
timeout=60,
|
||||||
|
)
|
||||||
|
except httpx.RequestError as err:
|
||||||
|
logger.warning("httpx.RequestError for group %s", group)
|
||||||
|
response = err
|
||||||
|
|
||||||
|
self._res_cache[group] = response
|
||||||
|
|
||||||
|
async def _complete_task(self, _task: dict) -> AgentResult:
|
||||||
|
try:
|
||||||
|
task = Task(**_task)
|
||||||
|
|
||||||
|
check_class = get_registered_check(task.check)
|
||||||
|
check = check_class(task)
|
||||||
|
|
||||||
|
response = self._res_cache[task.task_group]
|
||||||
|
if isinstance(response, httpx.Response):
|
||||||
|
result = await check.run(response)
|
||||||
|
status = result.status
|
||||||
|
context = result.context
|
||||||
|
else:
|
||||||
|
status = "failure"
|
||||||
|
context = SerializableException.from_exception(response)
|
||||||
except Exception as err: # pylint: disable=broad-except
|
except Exception as err: # pylint: disable=broad-except
|
||||||
status = "error"
|
status = "error"
|
||||||
context = SerializableException.from_exception(err)
|
context = SerializableException.from_exception(err)
|
||||||
msg = f"An exception occured when running {task}. {err.__class__.__name__} : {err}"
|
msg = f"An exception occured when running {_task}. {err.__class__.__name__} : {err}"
|
||||||
logger.error(msg)
|
logger.error(msg)
|
||||||
|
|
||||||
return AgentResult(task_id=task.id, status=status, context=context)
|
return AgentResult(task_id=task.id, status=status, context=context)
|
||||||
|
|
||||||
async def _get_and_complete_tasks(self):
|
async def _get_and_complete_tasks(self):
|
||||||
|
@ -81,12 +177,45 @@ class ArgosAgent:
|
||||||
)
|
)
|
||||||
|
|
||||||
if response.status_code == httpx.codes.OK:
|
if response.status_code == httpx.codes.OK:
|
||||||
# XXX Maybe we want to group the tests by URL ? (to issue one request per URL)
|
|
||||||
data = response.json()
|
data = response.json()
|
||||||
logger.info("Received %i tasks from the server", len(data))
|
logger.info("Received %i tasks from the server", len(data))
|
||||||
|
|
||||||
|
req_groups = {}
|
||||||
|
_tasks = []
|
||||||
|
for _task in data:
|
||||||
|
task = Task(**_task)
|
||||||
|
|
||||||
|
url = task.url
|
||||||
|
group = task.task_group
|
||||||
|
|
||||||
|
if task.check == "http-to-https":
|
||||||
|
data = task.request_data
|
||||||
|
if data is None:
|
||||||
|
data = ""
|
||||||
|
url = str(httpx.URL(task.url).copy_with(scheme="http"))
|
||||||
|
group = (
|
||||||
|
f"{task.method}-{task.ip_version}-{url}-"
|
||||||
|
f"{md5(data.encode()).hexdigest()}"
|
||||||
|
)
|
||||||
|
_task["task_group"] = group
|
||||||
|
|
||||||
|
req_groups[group] = {
|
||||||
|
"url": url,
|
||||||
|
"ip_version": task.ip_version,
|
||||||
|
"method": task.method,
|
||||||
|
"request_data": task.request_data,
|
||||||
|
}
|
||||||
|
_tasks.append(_task)
|
||||||
|
|
||||||
|
requests = []
|
||||||
|
for group, details in req_groups.items():
|
||||||
|
requests.append(self._do_request(group, details))
|
||||||
|
|
||||||
|
if requests:
|
||||||
|
await asyncio.gather(*requests)
|
||||||
|
|
||||||
tasks = []
|
tasks = []
|
||||||
for task in data:
|
for task in _tasks:
|
||||||
tasks.append(self._complete_task(task))
|
tasks.append(self._complete_task(task))
|
||||||
|
|
||||||
if tasks:
|
if tasks:
|
||||||
|
@ -94,7 +223,7 @@ class ArgosAgent:
|
||||||
await self._post_results(results)
|
await self._post_results(results)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
logger.error("Got no tasks from the server.")
|
logger.info("Got no tasks from the server.")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
logger.error("Failed to fetch tasks: %s", response.read())
|
logger.error("Failed to fetch tasks: %s", response.read())
|
||||||
|
@ -102,12 +231,19 @@ class ArgosAgent:
|
||||||
|
|
||||||
async def _post_results(self, results: List[AgentResult]):
|
async def _post_results(self, results: List[AgentResult]):
|
||||||
data = [r.model_dump() for r in results]
|
data = [r.model_dump() for r in results]
|
||||||
response = await self._http_client.post(
|
if self._http_client is not None:
|
||||||
f"{self.server}/api/results", params={"agent_id": self.agent_id}, json=data
|
response = await self._http_client.post(
|
||||||
)
|
f"{self.server}/api/results",
|
||||||
|
params={"agent_id": self.agent_id},
|
||||||
|
json=data,
|
||||||
|
)
|
||||||
|
|
||||||
if response.status_code == httpx.codes.CREATED:
|
if response.status_code == httpx.codes.CREATED:
|
||||||
logger.error("Successfully posted results %s", json.dumps(response.json()))
|
logger.info(
|
||||||
else:
|
"Successfully posted results %s", json.dumps(response.json())
|
||||||
logger.error("Failed to post results: %s", response.read())
|
)
|
||||||
return response
|
else:
|
||||||
|
logger.error("Failed to post results: %s", response.read())
|
||||||
|
return response
|
||||||
|
|
||||||
|
logger.error("self._http_client is None")
|
||||||
|
|
|
@ -1,9 +1,8 @@
|
||||||
"""Various base classes for checks"""
|
"""Various base classes for checks"""
|
||||||
|
|
||||||
from dataclasses import dataclass
|
from dataclasses import dataclass
|
||||||
from typing import Type, Union
|
from typing import Type
|
||||||
|
|
||||||
import httpx
|
|
||||||
from pydantic import BaseModel
|
from pydantic import BaseModel
|
||||||
|
|
||||||
from argos.schemas.models import Task
|
from argos.schemas.models import Task
|
||||||
|
@ -71,7 +70,7 @@ class InvalidResponse(Exception):
|
||||||
|
|
||||||
class BaseCheck:
|
class BaseCheck:
|
||||||
config: str
|
config: str
|
||||||
expected_cls: Union[None, Type[BaseExpectedValue]] = None
|
expected_cls: None | Type[BaseExpectedValue] = None
|
||||||
|
|
||||||
_registry = [] # type: ignore[var-annotated]
|
_registry = [] # type: ignore[var-annotated]
|
||||||
|
|
||||||
|
@ -92,8 +91,7 @@ class BaseCheck:
|
||||||
raise CheckNotFound(name)
|
raise CheckNotFound(name)
|
||||||
return check
|
return check
|
||||||
|
|
||||||
def __init__(self, http_client: httpx.AsyncClient, task: Task):
|
def __init__(self, task: Task):
|
||||||
self.http_client = http_client
|
|
||||||
self.task = task
|
self.task = task
|
||||||
|
|
||||||
@property
|
@property
|
||||||
|
|
|
@ -1,7 +1,12 @@
|
||||||
"""Define the available checks"""
|
"""Define the available checks"""
|
||||||
|
|
||||||
|
import json
|
||||||
|
import re
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
|
||||||
|
from httpx import Response
|
||||||
|
from jsonpointer import resolve_pointer, JsonPointerException
|
||||||
|
|
||||||
from argos.checks.base import (
|
from argos.checks.base import (
|
||||||
BaseCheck,
|
BaseCheck,
|
||||||
ExpectedIntValue,
|
ExpectedIntValue,
|
||||||
|
@ -17,13 +22,7 @@ class HTTPStatus(BaseCheck):
|
||||||
config = "status-is"
|
config = "status-is"
|
||||||
expected_cls = ExpectedIntValue
|
expected_cls = ExpectedIntValue
|
||||||
|
|
||||||
async def run(self) -> dict:
|
async def run(self, response: Response) -> dict:
|
||||||
# XXX Get the method from the task
|
|
||||||
task = self.task
|
|
||||||
response = await self.http_client.request(
|
|
||||||
method="get", url=task.url, timeout=60
|
|
||||||
)
|
|
||||||
|
|
||||||
return self.response(
|
return self.response(
|
||||||
status=response.status_code == self.expected,
|
status=response.status_code == self.expected,
|
||||||
expected=self.expected,
|
expected=self.expected,
|
||||||
|
@ -31,29 +30,240 @@ class HTTPStatus(BaseCheck):
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPStatusIn(BaseCheck):
|
||||||
|
"""Checks that the HTTP status code is in the list of expected values."""
|
||||||
|
|
||||||
|
config = "status-in"
|
||||||
|
expected_cls = ExpectedStringValue
|
||||||
|
|
||||||
|
async def run(self, response: Response) -> dict:
|
||||||
|
return self.response(
|
||||||
|
status=response.status_code in json.loads(self.expected),
|
||||||
|
expected=self.expected,
|
||||||
|
retrieved=response.status_code,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPToHTTPS(BaseCheck):
|
||||||
|
"""Checks that the HTTP to HTTPS redirection status code is the expected one."""
|
||||||
|
|
||||||
|
config = "http-to-https"
|
||||||
|
expected_cls = ExpectedStringValue
|
||||||
|
|
||||||
|
async def run(self, response: Response) -> dict:
|
||||||
|
expected_dict = json.loads(self.expected)
|
||||||
|
expected = range(300, 400)
|
||||||
|
if "range" in expected_dict:
|
||||||
|
expected = range(expected_dict["range"][0], expected_dict["range"][1])
|
||||||
|
if "value" in expected_dict:
|
||||||
|
expected = range(expected_dict["value"], expected_dict["value"] + 1)
|
||||||
|
if "list" in expected_dict:
|
||||||
|
expected = expected_dict["list"]
|
||||||
|
|
||||||
|
return self.response(
|
||||||
|
status=response.status_code in expected,
|
||||||
|
expected=self.expected,
|
||||||
|
retrieved=response.status_code,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPHeadersContain(BaseCheck):
|
||||||
|
"""Checks that response headers contains the expected headers
|
||||||
|
(without checking their values)"""
|
||||||
|
|
||||||
|
config = "headers-contain"
|
||||||
|
expected_cls = ExpectedStringValue
|
||||||
|
|
||||||
|
async def run(self, response: Response) -> dict:
|
||||||
|
status = True
|
||||||
|
for header in json.loads(self.expected):
|
||||||
|
if header not in response.headers:
|
||||||
|
status = False
|
||||||
|
break
|
||||||
|
|
||||||
|
return self.response(
|
||||||
|
status=status,
|
||||||
|
expected=self.expected,
|
||||||
|
retrieved=json.dumps(list(dict(response.headers).keys())),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPHeadersHave(BaseCheck):
|
||||||
|
"""Checks that response headers contains the expected headers and values"""
|
||||||
|
|
||||||
|
config = "headers-have"
|
||||||
|
expected_cls = ExpectedStringValue
|
||||||
|
|
||||||
|
async def run(self, response: Response) -> dict:
|
||||||
|
status = True
|
||||||
|
for header, value in json.loads(self.expected).items():
|
||||||
|
if header not in response.headers:
|
||||||
|
status = False
|
||||||
|
break
|
||||||
|
if response.headers[header] != value:
|
||||||
|
status = False
|
||||||
|
break
|
||||||
|
|
||||||
|
return self.response(
|
||||||
|
status=status,
|
||||||
|
expected=self.expected,
|
||||||
|
retrieved=json.dumps(dict(response.headers)),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPHeadersLike(BaseCheck):
|
||||||
|
"""Checks that response headers contains the expected headers and that the values
|
||||||
|
matches the provided regexes"""
|
||||||
|
|
||||||
|
config = "headers-like"
|
||||||
|
expected_cls = ExpectedStringValue
|
||||||
|
|
||||||
|
async def run(self, response: Response) -> dict:
|
||||||
|
status = True
|
||||||
|
for header, value in json.loads(self.expected).items():
|
||||||
|
if header not in response.headers:
|
||||||
|
status = False
|
||||||
|
break
|
||||||
|
if not re.search(rf"{value}", response.headers[header]):
|
||||||
|
status = False
|
||||||
|
break
|
||||||
|
|
||||||
|
return self.response(
|
||||||
|
status=status,
|
||||||
|
expected=self.expected,
|
||||||
|
retrieved=json.dumps(dict(response.headers)),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class HTTPBodyContains(BaseCheck):
|
class HTTPBodyContains(BaseCheck):
|
||||||
"""Checks that the HTTP body contains the expected string."""
|
"""Checks that the HTTP body contains the expected string."""
|
||||||
|
|
||||||
config = "body-contains"
|
config = "body-contains"
|
||||||
expected_cls = ExpectedStringValue
|
expected_cls = ExpectedStringValue
|
||||||
|
|
||||||
async def run(self) -> dict:
|
async def run(self, response: Response) -> dict:
|
||||||
response = await self.http_client.request(
|
|
||||||
method="get", url=self.task.url, timeout=60
|
|
||||||
)
|
|
||||||
return self.response(status=self.expected in response.text)
|
return self.response(status=self.expected in response.text)
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPBodyLike(BaseCheck):
|
||||||
|
"""Checks that the HTTP body matches the provided regex."""
|
||||||
|
|
||||||
|
config = "body-like"
|
||||||
|
expected_cls = ExpectedStringValue
|
||||||
|
|
||||||
|
async def run(self, response: Response) -> dict:
|
||||||
|
if re.search(rf"{self.expected}", response.text):
|
||||||
|
return self.response(status=True)
|
||||||
|
|
||||||
|
return self.response(status=False)
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPJsonContains(BaseCheck):
|
||||||
|
"""Checks that JSON response contains the expected structure
|
||||||
|
(without checking the value)"""
|
||||||
|
|
||||||
|
config = "json-contains"
|
||||||
|
expected_cls = ExpectedStringValue
|
||||||
|
|
||||||
|
async def run(self, response: Response) -> dict:
|
||||||
|
obj = response.json()
|
||||||
|
|
||||||
|
status = True
|
||||||
|
for pointer in json.loads(self.expected):
|
||||||
|
try:
|
||||||
|
resolve_pointer(obj, pointer)
|
||||||
|
except JsonPointerException:
|
||||||
|
status = False
|
||||||
|
break
|
||||||
|
|
||||||
|
return self.response(
|
||||||
|
status=status,
|
||||||
|
expected=self.expected,
|
||||||
|
retrieved=json.dumps(obj),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPJsonHas(BaseCheck):
|
||||||
|
"""Checks that JSON response contains the expected structure and values"""
|
||||||
|
|
||||||
|
config = "json-has"
|
||||||
|
expected_cls = ExpectedStringValue
|
||||||
|
|
||||||
|
async def run(self, response: Response) -> dict:
|
||||||
|
obj = response.json()
|
||||||
|
|
||||||
|
status = True
|
||||||
|
for pointer, exp_value in json.loads(self.expected).items():
|
||||||
|
try:
|
||||||
|
value = resolve_pointer(obj, pointer)
|
||||||
|
if value != exp_value:
|
||||||
|
status = False
|
||||||
|
break
|
||||||
|
except JsonPointerException:
|
||||||
|
status = False
|
||||||
|
break
|
||||||
|
|
||||||
|
return self.response(
|
||||||
|
status=status,
|
||||||
|
expected=self.expected,
|
||||||
|
retrieved=json.dumps(obj),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPJsonLike(BaseCheck):
|
||||||
|
"""Checks that JSON response contains the expected structure and that the values
|
||||||
|
matches the provided regexes"""
|
||||||
|
|
||||||
|
config = "json-like"
|
||||||
|
expected_cls = ExpectedStringValue
|
||||||
|
|
||||||
|
async def run(self, response: Response) -> dict:
|
||||||
|
obj = response.json()
|
||||||
|
|
||||||
|
status = True
|
||||||
|
for pointer, exp_value in json.loads(self.expected).items():
|
||||||
|
try:
|
||||||
|
value = resolve_pointer(obj, pointer)
|
||||||
|
if not re.search(rf"{exp_value:}", value):
|
||||||
|
status = False
|
||||||
|
break
|
||||||
|
except JsonPointerException:
|
||||||
|
status = False
|
||||||
|
break
|
||||||
|
|
||||||
|
return self.response(
|
||||||
|
status=status,
|
||||||
|
expected=self.expected,
|
||||||
|
retrieved=json.dumps(obj),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class HTTPJsonIs(BaseCheck):
|
||||||
|
"""Checks that JSON response is the exact expected JSON object"""
|
||||||
|
|
||||||
|
config = "json-is"
|
||||||
|
expected_cls = ExpectedStringValue
|
||||||
|
|
||||||
|
async def run(self, response: Response) -> dict:
|
||||||
|
obj = response.json()
|
||||||
|
|
||||||
|
status = response.json() == json.loads(self.expected)
|
||||||
|
|
||||||
|
return self.response(
|
||||||
|
status=status,
|
||||||
|
expected=self.expected,
|
||||||
|
retrieved=json.dumps(obj),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class SSLCertificateExpiration(BaseCheck):
|
class SSLCertificateExpiration(BaseCheck):
|
||||||
"""Checks that the SSL certificate will not expire soon."""
|
"""Checks that the SSL certificate will not expire soon."""
|
||||||
|
|
||||||
config = "ssl-certificate-expiration"
|
config = "ssl-certificate-expiration"
|
||||||
expected_cls = ExpectedStringValue
|
expected_cls = ExpectedStringValue
|
||||||
|
|
||||||
async def run(self):
|
async def run(self, response: Response) -> dict:
|
||||||
"""Returns the number of days in which the certificate will expire."""
|
"""Returns the number of days in which the certificate will expire."""
|
||||||
response = await self.http_client.get(self.task.url, timeout=60)
|
|
||||||
|
|
||||||
network_stream = response.extensions["network_stream"]
|
network_stream = response.extensions["network_stream"]
|
||||||
ssl_obj = network_stream.get_extra_info("ssl_object")
|
ssl_obj = network_stream.get_extra_info("ssl_object")
|
||||||
cert = ssl_obj.getpeercert()
|
cert = ssl_obj.getpeercert()
|
||||||
|
@ -65,6 +275,8 @@ class SSLCertificateExpiration(BaseCheck):
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
async def finalize(cls, config, result, **context):
|
async def finalize(cls, config, result, **context):
|
||||||
|
if result.status == Status.ERROR:
|
||||||
|
return result.status, Severity.UNKNOWN
|
||||||
if result.status != Status.ON_CHECK:
|
if result.status != Status.ON_CHECK:
|
||||||
return result.status, Severity.WARNING
|
return result.status, Severity.WARNING
|
||||||
|
|
||||||
|
|
|
@ -1,6 +1,8 @@
|
||||||
import asyncio
|
import asyncio
|
||||||
import os
|
import os
|
||||||
from functools import wraps
|
from functools import wraps
|
||||||
|
from pathlib import Path
|
||||||
|
from sys import exit as sysexit
|
||||||
from uuid import uuid4
|
from uuid import uuid4
|
||||||
|
|
||||||
import click
|
import click
|
||||||
|
@ -8,8 +10,7 @@ import uvicorn
|
||||||
from alembic import command
|
from alembic import command
|
||||||
from alembic.config import Config
|
from alembic.config import Config
|
||||||
|
|
||||||
from argos import logging
|
from argos import VERSION, logging
|
||||||
from argos import VERSION
|
|
||||||
from argos.agent import ArgosAgent
|
from argos.agent import ArgosAgent
|
||||||
|
|
||||||
|
|
||||||
|
@ -32,13 +33,24 @@ def coroutine(f):
|
||||||
|
|
||||||
|
|
||||||
def validate_config_access(ctx, param, value):
|
def validate_config_access(ctx, param, value):
|
||||||
if os.path.isfile(value) and os.access(value, os.R_OK):
|
for file in list(
|
||||||
return value
|
dict.fromkeys([value, "argos-config.yaml", "/etc/argos/config.yaml"])
|
||||||
|
):
|
||||||
|
path = Path(file)
|
||||||
|
|
||||||
if os.path.isfile(value):
|
if path.is_file() and os.access(path, os.R_OK):
|
||||||
raise click.BadParameter(f"the file {value} is not readabale.")
|
return file
|
||||||
|
|
||||||
raise click.BadParameter(f"the file {value} does not exists or is not reachable.")
|
if value == "argos-config.yaml":
|
||||||
|
raise click.BadParameter(
|
||||||
|
f"the file {value} does not exists or is not reachable, "
|
||||||
|
"nor does /etc/argos/config.yaml."
|
||||||
|
)
|
||||||
|
|
||||||
|
raise click.BadParameter(
|
||||||
|
f"the file {value} does not exists or is not reachable, "
|
||||||
|
"nor does argos-config.yaml or /etc/argos/config.yaml."
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@click.group()
|
@click.group()
|
||||||
|
@ -48,11 +60,17 @@ def cli():
|
||||||
|
|
||||||
@cli.group()
|
@cli.group()
|
||||||
def server():
|
def server():
|
||||||
pass
|
"""Commands for managing server, server’s configuration and users"""
|
||||||
|
|
||||||
|
|
||||||
|
@server.group()
|
||||||
|
def user():
|
||||||
|
"""User management"""
|
||||||
|
|
||||||
|
|
||||||
@cli.command()
|
@cli.command()
|
||||||
def version():
|
def version():
|
||||||
|
"""Prints Argos’ version and exits"""
|
||||||
click.echo(VERSION)
|
click.echo(VERSION)
|
||||||
|
|
||||||
|
|
||||||
|
@ -74,8 +92,13 @@ def version():
|
||||||
default="INFO",
|
default="INFO",
|
||||||
type=click.Choice(logging.LOG_LEVELS, case_sensitive=False),
|
type=click.Choice(logging.LOG_LEVELS, case_sensitive=False),
|
||||||
)
|
)
|
||||||
def agent(server_url, auth, max_tasks, wait_time, log_level):
|
@click.option(
|
||||||
"""Get and run tasks to the provided server. Will wait for new tasks.
|
"--user-agent",
|
||||||
|
default="",
|
||||||
|
help="A custom string to append to the User-Agent header",
|
||||||
|
)
|
||||||
|
def agent(server_url, auth, max_tasks, wait_time, log_level, user_agent): # pylint: disable-msg=too-many-positional-arguments
|
||||||
|
"""Get and run tasks for the provided server. Will wait for new tasks.
|
||||||
|
|
||||||
Usage: argos agent https://argos.example.org "auth-token-here"
|
Usage: argos agent https://argos.example.org "auth-token-here"
|
||||||
|
|
||||||
|
@ -90,7 +113,7 @@ def agent(server_url, auth, max_tasks, wait_time, log_level):
|
||||||
from argos.logging import logger
|
from argos.logging import logger
|
||||||
|
|
||||||
logger.setLevel(log_level)
|
logger.setLevel(log_level)
|
||||||
agent_ = ArgosAgent(server_url, auth, max_tasks, wait_time)
|
agent_ = ArgosAgent(server_url, auth, max_tasks, wait_time, user_agent)
|
||||||
asyncio.run(agent_.run())
|
asyncio.run(agent_.run())
|
||||||
|
|
||||||
|
|
||||||
|
@ -99,9 +122,10 @@ def agent(server_url, auth, max_tasks, wait_time, log_level):
|
||||||
@click.option("--port", default=8000, type=int, help="Port to bind")
|
@click.option("--port", default=8000, type=int, help="Port to bind")
|
||||||
@click.option(
|
@click.option(
|
||||||
"--config",
|
"--config",
|
||||||
default="config.yaml",
|
default="argos-config.yaml",
|
||||||
help="Path of the configuration file. "
|
help="Path of the configuration file. "
|
||||||
"If ARGOS_YAML_FILE environment variable is set, its value will be used instead.",
|
"If ARGOS_YAML_FILE environment variable is set, its value will be used instead. "
|
||||||
|
"Default value: argos-config.yaml and /etc/argos/config.yaml as fallback.",
|
||||||
envvar="ARGOS_YAML_FILE",
|
envvar="ARGOS_YAML_FILE",
|
||||||
callback=validate_config_access,
|
callback=validate_config_access,
|
||||||
)
|
)
|
||||||
|
@ -109,109 +133,66 @@ def agent(server_url, auth, max_tasks, wait_time, log_level):
|
||||||
def start(host, port, config, reload):
|
def start(host, port, config, reload):
|
||||||
"""Starts the server (use only for testing or development!)
|
"""Starts the server (use only for testing or development!)
|
||||||
|
|
||||||
See https://framasoft.frama.io/framaspace/argos/deployment/systemd.html#server
|
See https://argos-monitoring.framasoft.org/deployment/systemd.html#server
|
||||||
for advices on how to start the server for production.
|
for advices on how to start the server for production.
|
||||||
"""
|
"""
|
||||||
os.environ["ARGOS_YAML_FILE"] = config
|
os.environ["ARGOS_YAML_FILE"] = config
|
||||||
uvicorn.run("argos.server:app", host=host, port=port, reload=reload)
|
uvicorn.run("argos.server:app", host=host, port=port, reload=reload)
|
||||||
|
|
||||||
|
|
||||||
def validate_max_lock_seconds(ctx, param, value):
|
|
||||||
if value <= 60:
|
|
||||||
raise click.BadParameter("Should be strictly higher than 60")
|
|
||||||
return value
|
|
||||||
|
|
||||||
|
|
||||||
def validate_max_results(ctx, param, value):
|
|
||||||
if value <= 0:
|
|
||||||
raise click.BadParameter("Should be a positive integer")
|
|
||||||
return value
|
|
||||||
|
|
||||||
|
|
||||||
@server.command()
|
|
||||||
@click.option(
|
|
||||||
"--max-results",
|
|
||||||
default=100,
|
|
||||||
help="Number of results per task to keep",
|
|
||||||
callback=validate_max_results,
|
|
||||||
)
|
|
||||||
@click.option(
|
|
||||||
"--max-lock-seconds",
|
|
||||||
default=100,
|
|
||||||
help=(
|
|
||||||
"The number of seconds after which a lock is "
|
|
||||||
"considered stale, must be higher than 60 "
|
|
||||||
"(the checks have a timeout value of 60 seconds)"
|
|
||||||
),
|
|
||||||
callback=validate_max_lock_seconds,
|
|
||||||
)
|
|
||||||
@click.option(
|
|
||||||
"--config",
|
|
||||||
default="config.yaml",
|
|
||||||
help="Path of the configuration file. "
|
|
||||||
"If ARGOS_YAML_FILE environment variable is set, its value will be used instead.",
|
|
||||||
envvar="ARGOS_YAML_FILE",
|
|
||||||
callback=validate_config_access,
|
|
||||||
)
|
|
||||||
@coroutine
|
|
||||||
async def cleandb(max_results, max_lock_seconds, config):
|
|
||||||
"""Clean the database (to run routinely)
|
|
||||||
|
|
||||||
\b
|
|
||||||
- Removes old results from the database.
|
|
||||||
- Removes locks from tasks that have been locked for too long.
|
|
||||||
"""
|
|
||||||
# It’s mandatory to do it before the imports
|
|
||||||
os.environ["ARGOS_YAML_FILE"] = config
|
|
||||||
|
|
||||||
# The imports are made here otherwise the agent will need server configuration files.
|
|
||||||
from argos.server import queries
|
|
||||||
|
|
||||||
db = await get_db()
|
|
||||||
removed = await queries.remove_old_results(db, max_results)
|
|
||||||
updated = await queries.release_old_locks(db, max_lock_seconds)
|
|
||||||
|
|
||||||
click.echo(f"{removed} results removed")
|
|
||||||
click.echo(f"{updated} locks released")
|
|
||||||
|
|
||||||
|
|
||||||
@server.command(short_help="Load or reload tasks’ configuration")
|
@server.command(short_help="Load or reload tasks’ configuration")
|
||||||
@click.option(
|
@click.option(
|
||||||
"--config",
|
"--config",
|
||||||
default="config.yaml",
|
default="argos-config.yaml",
|
||||||
help="Path of the configuration file. "
|
help="Path of the configuration file. "
|
||||||
"If ARGOS_YAML_FILE environment variable is set, its value will be used instead.",
|
"If ARGOS_YAML_FILE environment variable is set, its value will be used instead. "
|
||||||
|
"Default value: argos-config.yaml and /etc/argos/config.yaml as fallback.",
|
||||||
envvar="ARGOS_YAML_FILE",
|
envvar="ARGOS_YAML_FILE",
|
||||||
callback=validate_config_access,
|
callback=validate_config_access,
|
||||||
)
|
)
|
||||||
|
@click.option(
|
||||||
|
"--enqueue/--no-enqueue",
|
||||||
|
default=False,
|
||||||
|
help="Let Argos main recurring tasks handle configuration’s loading. "
|
||||||
|
"It may delay the application of the new configuration up to 2 minutes. "
|
||||||
|
"Default is --no-enqueue",
|
||||||
|
)
|
||||||
@coroutine
|
@coroutine
|
||||||
async def reload_config(config):
|
async def reload_config(config, enqueue):
|
||||||
"""Read tasks’ configuration and add/delete tasks in database if needed"""
|
"""Read tasks’ configuration and add/delete tasks in database if needed"""
|
||||||
# It’s mandatory to do it before the imports
|
# It’s mandatory to do it before the imports
|
||||||
os.environ["ARGOS_YAML_FILE"] = config
|
os.environ["ARGOS_YAML_FILE"] = config
|
||||||
|
|
||||||
# The imports are made here otherwise the agent will need server configuration files.
|
# The imports are made here otherwise the agent will need server configuration files.
|
||||||
from argos.server import queries
|
from argos.server import queries
|
||||||
from argos.server.main import get_application, read_config
|
from argos.server.settings import read_config
|
||||||
from argos.server.settings import get_app_settings
|
|
||||||
|
|
||||||
appli = get_application()
|
_config = read_config(config)
|
||||||
settings = get_app_settings()
|
|
||||||
config = read_config(appli, settings)
|
|
||||||
|
|
||||||
db = await get_db()
|
db = await get_db()
|
||||||
changed = await queries.update_from_config(db, config)
|
|
||||||
|
|
||||||
click.echo(f"{changed['added']} tasks added")
|
config_changed = await queries.has_config_changed(db, _config)
|
||||||
click.echo(f"{changed['vanished']} tasks deleted")
|
if not config_changed:
|
||||||
|
click.echo("Config has not change")
|
||||||
|
else:
|
||||||
|
if enqueue:
|
||||||
|
msg = await queries.update_from_config_later(db, config_file=config)
|
||||||
|
|
||||||
|
click.echo(msg)
|
||||||
|
else:
|
||||||
|
changed = await queries.update_from_config(db, _config)
|
||||||
|
|
||||||
|
click.echo(f"{changed['added']} task(s) added")
|
||||||
|
click.echo(f"{changed['vanished']} task(s) deleted")
|
||||||
|
|
||||||
|
|
||||||
@server.command()
|
@server.command()
|
||||||
@click.option(
|
@click.option(
|
||||||
"--config",
|
"--config",
|
||||||
default="config.yaml",
|
default="argos-config.yaml",
|
||||||
help="Path of the configuration file. "
|
help="Path of the configuration file. "
|
||||||
"If ARGOS_YAML_FILE environment variable is set, its value will be used instead.",
|
"If ARGOS_YAML_FILE environment variable is set, its value will be used instead. "
|
||||||
|
"Default value: argos-config.yaml and /etc/argos/config.yaml as fallback.",
|
||||||
envvar="ARGOS_YAML_FILE",
|
envvar="ARGOS_YAML_FILE",
|
||||||
callback=validate_config_access,
|
callback=validate_config_access,
|
||||||
)
|
)
|
||||||
|
@ -222,16 +203,256 @@ async def migrate(config):
|
||||||
os.environ["ARGOS_YAML_FILE"] = config
|
os.environ["ARGOS_YAML_FILE"] = config
|
||||||
|
|
||||||
# The imports are made here otherwise the agent will need server configuration files.
|
# The imports are made here otherwise the agent will need server configuration files.
|
||||||
from argos.server.settings import get_app_settings
|
from argos.server.settings import read_yaml_config
|
||||||
|
|
||||||
settings = get_app_settings()
|
settings = read_yaml_config(config)
|
||||||
|
|
||||||
current_dir = os.path.dirname(__file__)
|
current_dir = Path(__file__).resolve().parent
|
||||||
alembic_cfg = Config(os.path.join(current_dir, "server/migrations/alembic.ini"))
|
alembic_cfg = Config(current_dir / "server" / "migrations" / "alembic.ini")
|
||||||
alembic_cfg.set_main_option("sqlalchemy.url", settings.database_url)
|
alembic_cfg.set_main_option("sqlalchemy.url", str(settings.general.db.url))
|
||||||
command.upgrade(alembic_cfg, "head")
|
command.upgrade(alembic_cfg, "head")
|
||||||
|
|
||||||
|
|
||||||
|
@user.command()
|
||||||
|
@click.option(
|
||||||
|
"--config",
|
||||||
|
default="argos-config.yaml",
|
||||||
|
help="Path of the configuration file. "
|
||||||
|
"If ARGOS_YAML_FILE environment variable is set, its value will be used instead.",
|
||||||
|
envvar="ARGOS_YAML_FILE",
|
||||||
|
callback=validate_config_access,
|
||||||
|
)
|
||||||
|
@click.option("--name", prompt=True, help="Name of the user to create.")
|
||||||
|
@click.password_option()
|
||||||
|
@coroutine
|
||||||
|
async def add(config, name, password):
|
||||||
|
"""Add new user"""
|
||||||
|
# It’s mandatory to do it before the imports
|
||||||
|
os.environ["ARGOS_YAML_FILE"] = config
|
||||||
|
|
||||||
|
# The imports are made here otherwise the agent will need server configuration files.
|
||||||
|
from passlib.context import CryptContext
|
||||||
|
|
||||||
|
from argos.server import queries
|
||||||
|
|
||||||
|
db = await get_db()
|
||||||
|
_user = await queries.get_user(db, name)
|
||||||
|
if _user is not None:
|
||||||
|
click.echo(f"User {name} already exists.")
|
||||||
|
sysexit(1)
|
||||||
|
|
||||||
|
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
|
||||||
|
await queries.add_user(db, name, pwd_context.hash(password))
|
||||||
|
click.echo(f"User {name} added.")
|
||||||
|
|
||||||
|
|
||||||
|
@user.command()
|
||||||
|
@click.option(
|
||||||
|
"--config",
|
||||||
|
default="argos-config.yaml",
|
||||||
|
help="Path of the configuration file. "
|
||||||
|
"If ARGOS_YAML_FILE environment variable is set, its value will be used instead.",
|
||||||
|
envvar="ARGOS_YAML_FILE",
|
||||||
|
callback=validate_config_access,
|
||||||
|
)
|
||||||
|
@click.option(
|
||||||
|
"--name", prompt=True, help="Name of the user you want to change the password."
|
||||||
|
)
|
||||||
|
@click.password_option()
|
||||||
|
@coroutine
|
||||||
|
async def change_password(config, name, password):
|
||||||
|
"""Change user’s password"""
|
||||||
|
# It’s mandatory to do it before the imports
|
||||||
|
os.environ["ARGOS_YAML_FILE"] = config
|
||||||
|
|
||||||
|
# The imports are made here otherwise the agent will need server configuration files.
|
||||||
|
from passlib.context import CryptContext
|
||||||
|
|
||||||
|
from argos.server import queries
|
||||||
|
|
||||||
|
db = await get_db()
|
||||||
|
_user = await queries.get_user(db, name)
|
||||||
|
if _user is None:
|
||||||
|
click.echo(f"User {name} does not exist.")
|
||||||
|
sysexit(1)
|
||||||
|
|
||||||
|
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
|
||||||
|
_user.password = pwd_context.hash(password)
|
||||||
|
db.commit()
|
||||||
|
click.echo(f"Password of user {name} changed.")
|
||||||
|
|
||||||
|
|
||||||
|
@user.command()
|
||||||
|
@click.option(
|
||||||
|
"--config",
|
||||||
|
default="argos-config.yaml",
|
||||||
|
help="Path of the configuration file. "
|
||||||
|
"If ARGOS_YAML_FILE environment variable is set, its value will be used instead.",
|
||||||
|
envvar="ARGOS_YAML_FILE",
|
||||||
|
callback=validate_config_access,
|
||||||
|
)
|
||||||
|
@click.option(
|
||||||
|
"--name", required=True, help="Name of the user you want to test the password for."
|
||||||
|
)
|
||||||
|
@click.option("--password", prompt=True, hide_input=True)
|
||||||
|
@coroutine
|
||||||
|
async def verify_password(config, name, password):
|
||||||
|
"""Test user’s password"""
|
||||||
|
# It’s mandatory to do it before the imports
|
||||||
|
os.environ["ARGOS_YAML_FILE"] = config
|
||||||
|
|
||||||
|
# The imports are made here otherwise the agent will need server configuration files.
|
||||||
|
from passlib.context import CryptContext
|
||||||
|
|
||||||
|
from argos.server import queries
|
||||||
|
|
||||||
|
db = await get_db()
|
||||||
|
_user = await queries.get_user(db, name)
|
||||||
|
if _user is None:
|
||||||
|
click.echo(f"User {name} does not exist.")
|
||||||
|
sysexit(1)
|
||||||
|
|
||||||
|
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
|
||||||
|
if not pwd_context.verify(password, _user.password):
|
||||||
|
click.echo("Wrong password!")
|
||||||
|
sysexit(2)
|
||||||
|
|
||||||
|
click.echo("The provided password is correct.")
|
||||||
|
|
||||||
|
|
||||||
|
@user.command()
|
||||||
|
@click.option(
|
||||||
|
"--config",
|
||||||
|
default="argos-config.yaml",
|
||||||
|
help="Path of the configuration file. "
|
||||||
|
"If ARGOS_YAML_FILE environment variable is set, its value will be used instead.",
|
||||||
|
envvar="ARGOS_YAML_FILE",
|
||||||
|
callback=validate_config_access,
|
||||||
|
)
|
||||||
|
@click.option("--name", required=True, help="Name of the user to disable.")
|
||||||
|
@coroutine
|
||||||
|
async def disable(config, name):
|
||||||
|
"""Disable user"""
|
||||||
|
# It’s mandatory to do it before the imports
|
||||||
|
os.environ["ARGOS_YAML_FILE"] = config
|
||||||
|
|
||||||
|
# The imports are made here otherwise the agent will need server configuration files.
|
||||||
|
from argos.server import queries
|
||||||
|
|
||||||
|
db = await get_db()
|
||||||
|
_user = await queries.get_user(db, name)
|
||||||
|
if _user is None:
|
||||||
|
click.echo(f"User {name} does not exist.")
|
||||||
|
sysexit(1)
|
||||||
|
if _user.disabled:
|
||||||
|
click.echo(f"User {name} is already disabled.")
|
||||||
|
sysexit(2)
|
||||||
|
|
||||||
|
_user.disabled = True
|
||||||
|
db.commit()
|
||||||
|
|
||||||
|
click.echo(f"User {name} disabled.")
|
||||||
|
|
||||||
|
|
||||||
|
@user.command()
|
||||||
|
@click.option(
|
||||||
|
"--config",
|
||||||
|
default="argos-config.yaml",
|
||||||
|
help="Path of the configuration file. "
|
||||||
|
"If ARGOS_YAML_FILE environment variable is set, its value will be used instead.",
|
||||||
|
envvar="ARGOS_YAML_FILE",
|
||||||
|
callback=validate_config_access,
|
||||||
|
)
|
||||||
|
@click.option("--name", required=True, help="Name of the user to reenable")
|
||||||
|
@coroutine
|
||||||
|
async def enable(config, name):
|
||||||
|
"""Enable user"""
|
||||||
|
# It’s mandatory to do it before the imports
|
||||||
|
os.environ["ARGOS_YAML_FILE"] = config
|
||||||
|
|
||||||
|
# The imports are made here otherwise the agent will need server configuration files.
|
||||||
|
from argos.server import queries
|
||||||
|
|
||||||
|
db = await get_db()
|
||||||
|
_user = await queries.get_user(db, name)
|
||||||
|
if _user is None:
|
||||||
|
click.echo(f"User {name} does not exist.")
|
||||||
|
sysexit(1)
|
||||||
|
if not _user.disabled:
|
||||||
|
click.echo(f"User {name} is already enabled.")
|
||||||
|
sysexit(2)
|
||||||
|
|
||||||
|
_user.disabled = False
|
||||||
|
db.commit()
|
||||||
|
|
||||||
|
click.echo(f"User {name} enabled.")
|
||||||
|
|
||||||
|
|
||||||
|
@user.command()
|
||||||
|
@click.option(
|
||||||
|
"--config",
|
||||||
|
default="argos-config.yaml",
|
||||||
|
help="Path of the configuration file. "
|
||||||
|
"If ARGOS_YAML_FILE environment variable is set, its value will be used instead.",
|
||||||
|
envvar="ARGOS_YAML_FILE",
|
||||||
|
callback=validate_config_access,
|
||||||
|
)
|
||||||
|
@click.option("--name", required=True, help="Name of the user to delete.")
|
||||||
|
@coroutine
|
||||||
|
async def delete(config, name):
|
||||||
|
"""Delete user"""
|
||||||
|
# It’s mandatory to do it before the imports
|
||||||
|
os.environ["ARGOS_YAML_FILE"] = config
|
||||||
|
|
||||||
|
# The imports are made here otherwise the agent will need server configuration files.
|
||||||
|
from argos.server import queries
|
||||||
|
|
||||||
|
db = await get_db()
|
||||||
|
_user = await queries.get_user(db, name)
|
||||||
|
if _user is None:
|
||||||
|
click.echo(f"User {name} does not exist.")
|
||||||
|
sysexit(1)
|
||||||
|
|
||||||
|
db.delete(_user)
|
||||||
|
db.commit()
|
||||||
|
|
||||||
|
click.echo(f"User {name} deleted.")
|
||||||
|
|
||||||
|
|
||||||
|
@user.command()
|
||||||
|
@click.option(
|
||||||
|
"--config",
|
||||||
|
default="argos-config.yaml",
|
||||||
|
help="Path of the configuration file. "
|
||||||
|
"If ARGOS_YAML_FILE environment variable is set, its value will be used instead.",
|
||||||
|
envvar="ARGOS_YAML_FILE",
|
||||||
|
callback=validate_config_access,
|
||||||
|
)
|
||||||
|
@coroutine
|
||||||
|
async def show(config):
|
||||||
|
"""List all users"""
|
||||||
|
# It’s mandatory to do it before the imports
|
||||||
|
os.environ["ARGOS_YAML_FILE"] = config
|
||||||
|
|
||||||
|
# The imports are made here otherwise the agent will need server configuration files.
|
||||||
|
from argos.server import queries
|
||||||
|
|
||||||
|
db = await get_db()
|
||||||
|
users = await queries.list_users(db)
|
||||||
|
if users.count() == 0:
|
||||||
|
click.echo("There is no users in database.")
|
||||||
|
sysexit(1)
|
||||||
|
|
||||||
|
click.echo("✅ means that the user is enabled.")
|
||||||
|
click.echo("❌ means that the user is disabled.")
|
||||||
|
|
||||||
|
for _user in users.all():
|
||||||
|
status = "✅"
|
||||||
|
if _user.disabled:
|
||||||
|
status = "❌"
|
||||||
|
click.echo(f"{status} {_user.username}, last login: {_user.last_login_at}")
|
||||||
|
|
||||||
|
|
||||||
@server.command(short_help="Generate a token for agents")
|
@server.command(short_help="Generate a token for agents")
|
||||||
@coroutine
|
@coroutine
|
||||||
async def generate_token():
|
async def generate_token():
|
||||||
|
@ -242,5 +463,270 @@ async def generate_token():
|
||||||
click.echo(uuid4())
|
click.echo(uuid4())
|
||||||
|
|
||||||
|
|
||||||
|
@server.command()
|
||||||
|
@coroutine
|
||||||
|
async def generate_config():
|
||||||
|
"""Output a self-documented example config file.
|
||||||
|
|
||||||
|
\b
|
||||||
|
Redirect the output to a file to save it:
|
||||||
|
argos server generate-config > /etc/argos/config.yaml
|
||||||
|
"""
|
||||||
|
config_example = Path(__file__).resolve().parent / "config-example.yaml"
|
||||||
|
with config_example.open("r", encoding="utf-8") as f:
|
||||||
|
print(f.read())
|
||||||
|
|
||||||
|
|
||||||
|
@server.command()
|
||||||
|
@click.option(
|
||||||
|
"--config",
|
||||||
|
default="argos-config.yaml",
|
||||||
|
help="Path of the configuration file. "
|
||||||
|
"If ARGOS_YAML_FILE environment variable is set, its value will be used instead.",
|
||||||
|
envvar="ARGOS_YAML_FILE",
|
||||||
|
callback=validate_config_access,
|
||||||
|
)
|
||||||
|
@click.option("--domain", help="Domain for the notification", default="example.org")
|
||||||
|
@click.option("--severity", help="Severity", default="CRITICAL")
|
||||||
|
@coroutine
|
||||||
|
async def test_mail(config, domain, severity):
|
||||||
|
"""Send a test email"""
|
||||||
|
os.environ["ARGOS_YAML_FILE"] = config
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
from argos.logging import set_log_level
|
||||||
|
from argos.server.alerting import notify_by_mail
|
||||||
|
from argos.server.models import Result, Task
|
||||||
|
from argos.server.settings import read_config
|
||||||
|
|
||||||
|
conf = read_config(config)
|
||||||
|
|
||||||
|
if not conf.general.mail:
|
||||||
|
click.echo("Mail is not configured, cannot test", err=True)
|
||||||
|
sysexit(1)
|
||||||
|
else:
|
||||||
|
now = datetime.now()
|
||||||
|
task = Task(
|
||||||
|
url=f"https://{domain}",
|
||||||
|
domain=domain,
|
||||||
|
check="body-contains",
|
||||||
|
expected="foo",
|
||||||
|
frequency=1,
|
||||||
|
ip_version=4,
|
||||||
|
selected_by="test",
|
||||||
|
selected_at=now,
|
||||||
|
)
|
||||||
|
|
||||||
|
result = Result(
|
||||||
|
submitted_at=now,
|
||||||
|
status="success",
|
||||||
|
context={"foo": "bar"},
|
||||||
|
task=task,
|
||||||
|
agent_id="test",
|
||||||
|
severity="ok",
|
||||||
|
)
|
||||||
|
|
||||||
|
class _FalseRequest:
|
||||||
|
def url_for(*args, **kwargs):
|
||||||
|
return "/url"
|
||||||
|
|
||||||
|
set_log_level("debug")
|
||||||
|
notify_by_mail(
|
||||||
|
result,
|
||||||
|
task,
|
||||||
|
severity=severity,
|
||||||
|
old_severity="OLD SEVERITY",
|
||||||
|
config=conf.general.mail,
|
||||||
|
request=_FalseRequest(),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@server.command()
|
||||||
|
@click.option(
|
||||||
|
"--config",
|
||||||
|
default="argos-config.yaml",
|
||||||
|
help="Path of the configuration file. "
|
||||||
|
"If ARGOS_YAML_FILE environment variable is set, its value will be used instead.",
|
||||||
|
envvar="ARGOS_YAML_FILE",
|
||||||
|
callback=validate_config_access,
|
||||||
|
)
|
||||||
|
@click.option("--domain", help="Domain for the notification", default="example.org")
|
||||||
|
@click.option("--severity", help="Severity", default="CRITICAL")
|
||||||
|
@coroutine
|
||||||
|
async def test_gotify(config, domain, severity):
|
||||||
|
"""Send a test gotify notification"""
|
||||||
|
os.environ["ARGOS_YAML_FILE"] = config
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
from argos.logging import set_log_level
|
||||||
|
from argos.server.alerting import notify_with_gotify
|
||||||
|
from argos.server.models import Result, Task
|
||||||
|
from argos.server.settings import read_config
|
||||||
|
|
||||||
|
conf = read_config(config)
|
||||||
|
|
||||||
|
if not conf.general.gotify:
|
||||||
|
click.echo("Gotify notifications are not configured, cannot test", err=True)
|
||||||
|
sysexit(1)
|
||||||
|
else:
|
||||||
|
now = datetime.now()
|
||||||
|
task = Task(
|
||||||
|
url=f"https://{domain}",
|
||||||
|
domain=domain,
|
||||||
|
check="body-contains",
|
||||||
|
expected="foo",
|
||||||
|
frequency=1,
|
||||||
|
ip_version=4,
|
||||||
|
selected_by="test",
|
||||||
|
selected_at=now,
|
||||||
|
)
|
||||||
|
|
||||||
|
result = Result(
|
||||||
|
submitted_at=now,
|
||||||
|
status="success",
|
||||||
|
context={"foo": "bar"},
|
||||||
|
task=task,
|
||||||
|
agent_id="test",
|
||||||
|
severity="ok",
|
||||||
|
)
|
||||||
|
|
||||||
|
class _FalseRequest:
|
||||||
|
def url_for(*args, **kwargs):
|
||||||
|
return "/url"
|
||||||
|
|
||||||
|
set_log_level("debug")
|
||||||
|
notify_with_gotify(
|
||||||
|
result,
|
||||||
|
task,
|
||||||
|
severity=severity,
|
||||||
|
old_severity="OLD SEVERITY",
|
||||||
|
config=conf.general.gotify,
|
||||||
|
request=_FalseRequest(),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@server.command()
|
||||||
|
@click.option(
|
||||||
|
"--config",
|
||||||
|
default="argos-config.yaml",
|
||||||
|
help="Path of the configuration file. "
|
||||||
|
"If ARGOS_YAML_FILE environment variable is set, its value will be used instead.",
|
||||||
|
envvar="ARGOS_YAML_FILE",
|
||||||
|
callback=validate_config_access,
|
||||||
|
)
|
||||||
|
@click.option("--domain", help="Domain for the notification", default="example.org")
|
||||||
|
@click.option("--severity", help="Severity", default="CRITICAL")
|
||||||
|
@click.option(
|
||||||
|
"--apprise-group", help="Apprise group for the notification", required=True
|
||||||
|
)
|
||||||
|
@coroutine
|
||||||
|
async def test_apprise(config, domain, severity, apprise_group):
|
||||||
|
"""Send a test apprise notification"""
|
||||||
|
os.environ["ARGOS_YAML_FILE"] = config
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
|
||||||
|
from argos.logging import set_log_level
|
||||||
|
from argos.server.alerting import notify_with_apprise
|
||||||
|
from argos.server.models import Result, Task
|
||||||
|
from argos.server.settings import read_config
|
||||||
|
|
||||||
|
conf = read_config(config)
|
||||||
|
|
||||||
|
if not conf.general.apprise:
|
||||||
|
click.echo("Apprise notifications are not configured, cannot test", err=True)
|
||||||
|
sysexit(1)
|
||||||
|
else:
|
||||||
|
now = datetime.now()
|
||||||
|
task = Task(
|
||||||
|
url=f"https://{domain}",
|
||||||
|
domain=domain,
|
||||||
|
check="body-contains",
|
||||||
|
expected="foo",
|
||||||
|
frequency=1,
|
||||||
|
ip_version=4,
|
||||||
|
selected_by="test",
|
||||||
|
selected_at=now,
|
||||||
|
)
|
||||||
|
|
||||||
|
result = Result(
|
||||||
|
submitted_at=now,
|
||||||
|
status="success",
|
||||||
|
context={"foo": "bar"},
|
||||||
|
task=task,
|
||||||
|
agent_id="test",
|
||||||
|
severity="ok",
|
||||||
|
)
|
||||||
|
|
||||||
|
class _FalseRequest:
|
||||||
|
def url_for(*args, **kwargs):
|
||||||
|
return "/url"
|
||||||
|
|
||||||
|
set_log_level("debug")
|
||||||
|
notify_with_apprise(
|
||||||
|
result,
|
||||||
|
task,
|
||||||
|
severity=severity,
|
||||||
|
old_severity="OLD SEVERITY",
|
||||||
|
group=conf.general.apprise[apprise_group],
|
||||||
|
request=_FalseRequest(),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@server.command(short_help="Nagios compatible severities report")
|
||||||
|
@click.option(
|
||||||
|
"--config",
|
||||||
|
default="argos-config.yaml",
|
||||||
|
help="Path of the configuration file. "
|
||||||
|
"If ARGOS_YAML_FILE environment variable is set, its value will be used instead.",
|
||||||
|
envvar="ARGOS_YAML_FILE",
|
||||||
|
callback=validate_config_access,
|
||||||
|
)
|
||||||
|
@coroutine
|
||||||
|
async def nagios(config):
|
||||||
|
"""Output a report of current severities suitable for Nagios
|
||||||
|
with a Nagios compatible exit code"""
|
||||||
|
os.environ["ARGOS_YAML_FILE"] = config
|
||||||
|
|
||||||
|
# The imports are made here otherwise the agent will need server configuration files.
|
||||||
|
from argos.server import queries
|
||||||
|
|
||||||
|
exit_nb = 0
|
||||||
|
db = await get_db()
|
||||||
|
severities = await queries.get_severity_counts(db)
|
||||||
|
|
||||||
|
if severities["warning"] != 0:
|
||||||
|
exit_nb = 1
|
||||||
|
if severities["critical"] != 0:
|
||||||
|
exit_nb = 2
|
||||||
|
if severities["unknown"] != 0:
|
||||||
|
exit_nb = 2
|
||||||
|
|
||||||
|
stats = (
|
||||||
|
f"ok={severities['ok']}; warning={severities['warning']}; "
|
||||||
|
f"critical={severities['critical']}; unknown={severities['unknown']};"
|
||||||
|
)
|
||||||
|
|
||||||
|
if exit_nb == 0:
|
||||||
|
print("OK — All sites are ok|{stats}")
|
||||||
|
elif exit_nb == 1:
|
||||||
|
print(f"WARNING — {severities['warning']} sites are in warning state|{stats}")
|
||||||
|
elif severities["critical"] == 0:
|
||||||
|
print(f"UNKNOWN — {severities['unknown']} sites are in unknown state|{stats}")
|
||||||
|
elif severities["unknown"] == 0:
|
||||||
|
print(
|
||||||
|
f"CRITICAL — {severities['critical']} sites are in critical state|{stats}"
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
print(
|
||||||
|
f"CRITICAL/UNKNOWN — {severities['critical']} sites are in critical state "
|
||||||
|
f"and {severities['unknown']} sites are in unknown state|{stats}"
|
||||||
|
)
|
||||||
|
|
||||||
|
sysexit(exit_nb)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
cli()
|
cli()
|
||||||
|
|
275
argos/config-example.yaml
Normal file
275
argos/config-example.yaml
Normal file
|
@ -0,0 +1,275 @@
|
||||||
|
---
|
||||||
|
general:
|
||||||
|
# Except for frequency and recheck_delay settings, changes in general
|
||||||
|
# section of the configuration will need a restart of argos server.
|
||||||
|
db:
|
||||||
|
# The database URL, as defined in SQLAlchemy docs :
|
||||||
|
# https://docs.sqlalchemy.org/en/20/core/engines.html#database-urls
|
||||||
|
# Example for SQLite: "sqlite:////tmp/argos.db"
|
||||||
|
url: "postgresql://argos:argos@localhost/argos"
|
||||||
|
# You configure the size of the database pool of connection, and
|
||||||
|
# the max overflow (until when new connections are accepted ?)
|
||||||
|
# For details, see
|
||||||
|
# https://docs.sqlalchemy.org/en/20/core/pooling.html#sqlalchemy.pool.QueuePool.params.pool_size
|
||||||
|
pool_size: 10
|
||||||
|
max_overflow: 20
|
||||||
|
# Can be "production", "dev", "test".
|
||||||
|
# If not present, default value is "production"
|
||||||
|
env: "production"
|
||||||
|
# To get a good string for cookie_secret, run:
|
||||||
|
# openssl rand -hex 32
|
||||||
|
cookie_secret: "foo_bar_baz"
|
||||||
|
|
||||||
|
# Session duration
|
||||||
|
# Use m for minutes, h for hours, d for days
|
||||||
|
# w for weeks, M for months, y for years
|
||||||
|
# See https://github.com/timwedde/durations_nlp#scales-reference for details
|
||||||
|
# If not present, default value is "7d"
|
||||||
|
session_duration: "7d"
|
||||||
|
# Session opened with "Remember me" checked
|
||||||
|
# If not present, the "Remember me" feature is not available
|
||||||
|
# remember_me_duration: "1M"
|
||||||
|
|
||||||
|
# Unauthenticated access
|
||||||
|
# If can grant an unauthenticated access to the dashboard or to all pages
|
||||||
|
# To do so, choose either "dashboard", or "all"
|
||||||
|
# If not present, all pages needs authentication
|
||||||
|
# unauthenticated_access: "all"
|
||||||
|
|
||||||
|
# LDAP authentication
|
||||||
|
# Instead of relying on Argos’ users, use a LDAP server to authenticate users.
|
||||||
|
# If not present, Argos’ native user system is used.
|
||||||
|
# ldap:
|
||||||
|
# # Server URI
|
||||||
|
# uri: "ldaps://ldap.example.org"
|
||||||
|
# # Search base DN
|
||||||
|
# user_tree: "ou=users,dc=example,dc=org"
|
||||||
|
# # Search bind DN
|
||||||
|
# bind_dn: "uid=ldap_user,ou=users,dc=example,dc=org"
|
||||||
|
# # Search bind password
|
||||||
|
# bind_pwd: "secr3t"
|
||||||
|
# # User attribute (uid, mail, sAMAccountName, etc.)
|
||||||
|
# user_attr: "uid"
|
||||||
|
# # User filter (to exclude some users, etc.)
|
||||||
|
# user_filter: "(!(uid=ldap_user))"
|
||||||
|
|
||||||
|
# Default delay for checks.
|
||||||
|
# Can be superseeded in domain configuration.
|
||||||
|
# For ex., to run checks every 5 minutes:
|
||||||
|
frequency: "5m"
|
||||||
|
# Default re-check delay if a check has failed.
|
||||||
|
# Can be superseeded in domain configuration.
|
||||||
|
# If not present, failed checked won’t be re-checked (they will be
|
||||||
|
# run again like if they succeded
|
||||||
|
# For ex., to re-try a check one minute after a failure:
|
||||||
|
# recheck_delay: "1m"
|
||||||
|
|
||||||
|
# Default setting for notifications delay.
|
||||||
|
# Say you want to be warned right after a failure on a check: set it to 0
|
||||||
|
# Say you want a second failure on the check before being warned,
|
||||||
|
# to avoid network hiccups: set it to 1
|
||||||
|
# Can be superseeded in domain configuration
|
||||||
|
# If not present, default is 0
|
||||||
|
# retry_before_notification: 0
|
||||||
|
|
||||||
|
# Defaults settings for IPv4/IPv6
|
||||||
|
# Can be superseeded in domain configuration.
|
||||||
|
# By default, Argos will check both IPv4 and IPv6 addresses of a domain
|
||||||
|
# (i.e. by default, both `ipv4` and `ipv6` are set to true).
|
||||||
|
# To disable the IPv4 check of domains:
|
||||||
|
# ipv4: false
|
||||||
|
# To disable the IPv6 check of domains:
|
||||||
|
# ipv6: false
|
||||||
|
|
||||||
|
# Argos root path
|
||||||
|
# If not present, default value is ""
|
||||||
|
# Set it to /foo if you want to use argos at /foo/ instead of /
|
||||||
|
# on your web server
|
||||||
|
# root_path: "/foo"
|
||||||
|
|
||||||
|
# Which way do you want to be warned when a check goes to that severity?
|
||||||
|
# "local" emits a message in the server log
|
||||||
|
# You’ll need to configure mail, gotify or apprise below to be able to use
|
||||||
|
# them here.
|
||||||
|
# Use "apprise:john", "apprise:team" (with the quotes!) to use apprise
|
||||||
|
# notification groups.
|
||||||
|
alerts:
|
||||||
|
ok:
|
||||||
|
- local
|
||||||
|
warning:
|
||||||
|
- local
|
||||||
|
critical:
|
||||||
|
- local
|
||||||
|
unknown:
|
||||||
|
- local
|
||||||
|
# This alert is triggered when no Argos agent has been seen in a while
|
||||||
|
# See recurring_tasks.time_without_agent below
|
||||||
|
no_agent:
|
||||||
|
- local
|
||||||
|
# Mail configuration is quite straight-forward
|
||||||
|
# mail:
|
||||||
|
# mailfrom: no-reply@example.org
|
||||||
|
# host: 127.0.0.1
|
||||||
|
# port: 25
|
||||||
|
# ssl: False
|
||||||
|
# starttls: False
|
||||||
|
# auth:
|
||||||
|
# login: foo
|
||||||
|
# password: bar
|
||||||
|
# addresses:
|
||||||
|
# - foo@admin.example.org
|
||||||
|
# - bar@admin.example.org
|
||||||
|
# Create an app on your Gotify server and put its token here
|
||||||
|
# See https://gotify.net/ for details about Gotify
|
||||||
|
# gotify:
|
||||||
|
# - url: https://example.org
|
||||||
|
# tokens:
|
||||||
|
# - foo
|
||||||
|
# - bar
|
||||||
|
# See https://github.com/caronc/apprise#productivity-based-notifications
|
||||||
|
# for apprise’s URLs syntax.
|
||||||
|
# You need to surround the URLs with quotes like in the examples below.
|
||||||
|
# Use "apprise:john", "apprise:team" (with the quotes!) in "alerts" settings.
|
||||||
|
# apprise:
|
||||||
|
# john:
|
||||||
|
# - "mastodon://access_key@hostname/@user"
|
||||||
|
# - "matrixs://token@hostname:port/?webhook=matrix"
|
||||||
|
# team:
|
||||||
|
# - "mmosts://user@hostname/authkey"
|
||||||
|
# - "nctalks://user:pass@host/RoomId1/RoomId2/RoomIdN"
|
||||||
|
|
||||||
|
service:
|
||||||
|
secrets:
|
||||||
|
# Secrets can be generated using `argos server generate-token`.
|
||||||
|
# You need at least one. Write them as a list, like:
|
||||||
|
# - secret_token
|
||||||
|
|
||||||
|
ssl:
|
||||||
|
thresholds:
|
||||||
|
- "1d": critical
|
||||||
|
- "5d": warning
|
||||||
|
|
||||||
|
# Argos will execute some tasks in the background for you
|
||||||
|
# every 2 minutes and needs some configuration for that
|
||||||
|
recurring_tasks:
|
||||||
|
# Maximum age of results
|
||||||
|
# Use m for minutes, h for hours, d for days
|
||||||
|
# w for weeks, M for months, y for years
|
||||||
|
# See https://github.com/timwedde/durations_nlp#scales-reference for details
|
||||||
|
max_results_age: "1d"
|
||||||
|
# Max number of seconds a task can be locked
|
||||||
|
# Minimum value is 61, default is 100
|
||||||
|
max_lock_seconds: 100
|
||||||
|
# Max number of minutes without seing an agent
|
||||||
|
# before sending an alert
|
||||||
|
# Minimum value is 1, default is 5
|
||||||
|
time_without_agent: 5
|
||||||
|
|
||||||
|
# It's also possible to define the checks in another file
|
||||||
|
# with the include syntax:
|
||||||
|
#
|
||||||
|
# websites: !include websites.yaml
|
||||||
|
#
|
||||||
|
websites:
|
||||||
|
- domain: "https://mypads.example.org"
|
||||||
|
# Wait for a second failure before sending notification
|
||||||
|
retry_before_notification: 1
|
||||||
|
paths:
|
||||||
|
- path: "/mypads/"
|
||||||
|
# Specify the method of the HTTP request
|
||||||
|
# Valid values are "GET", "HEAD", "POST", "OPTIONS",
|
||||||
|
# "CONNECT", "TRACE", "PUT", "PATCH" and "DELETE"
|
||||||
|
# default is "GET" if omitted
|
||||||
|
method: "GET"
|
||||||
|
checks:
|
||||||
|
# Check that the returned HTTP status is 200
|
||||||
|
- status-is: 200
|
||||||
|
# Check that the response contains this string
|
||||||
|
- body-contains: '<div id= "mypads"></div>'
|
||||||
|
# Check that the response matches this regex
|
||||||
|
- body-like: MyPads .* accounts
|
||||||
|
# Check that the SSL certificate is no older than ssl.thresholds
|
||||||
|
- ssl-certificate-expiration: "on-check"
|
||||||
|
# Check that the response contains this headers
|
||||||
|
# The comparison is case insensitive
|
||||||
|
- headers-contain:
|
||||||
|
- "content-encoding"
|
||||||
|
- "content-type"
|
||||||
|
# Check that there is a HTTP to HTTPS redirection with 3xx status code
|
||||||
|
- http-to-https: true
|
||||||
|
# Check that there is a HTTP to HTTPS redirection with 301 status code
|
||||||
|
- http-to-https: 301
|
||||||
|
# Check that there is a HTTP to HTTPS redirection with a status code
|
||||||
|
# in the provided range (stop value excluded)
|
||||||
|
- http-to-https:
|
||||||
|
start: 301
|
||||||
|
stop: 308
|
||||||
|
# Check that there is a HTTP to HTTPS redirection with a status code
|
||||||
|
# in the provided list
|
||||||
|
- http-to-https:
|
||||||
|
- 301
|
||||||
|
- 302
|
||||||
|
- 307
|
||||||
|
- path: "/admin/"
|
||||||
|
methode: "POST"
|
||||||
|
# Send form data in the request
|
||||||
|
request_data:
|
||||||
|
data:
|
||||||
|
login: "admin"
|
||||||
|
password: "my-password"
|
||||||
|
# To send data as JSON (optional, default is false):
|
||||||
|
is_json: true
|
||||||
|
# To send additional headers
|
||||||
|
headers:
|
||||||
|
Authorization: "Bearer foo-bar-baz"
|
||||||
|
checks:
|
||||||
|
# Check that the return HTTP status is one of those
|
||||||
|
# Similar to status-is, verify that you don’t mistyped it!
|
||||||
|
- status-in:
|
||||||
|
- 401
|
||||||
|
- 301
|
||||||
|
# Check that the response contains this headers and values
|
||||||
|
# It’s VERY important to respect the 4 spaces indentation here!
|
||||||
|
# The name of the headers is case insensitive
|
||||||
|
- headers-have:
|
||||||
|
content-encoding: "gzip"
|
||||||
|
content-type: "text/html"
|
||||||
|
# Checks that response headers contains the expected headers and
|
||||||
|
# that the values matches the provided regexes
|
||||||
|
# You have to double the escape character \
|
||||||
|
- headers-like:
|
||||||
|
content-encoding: "gzip|utf"
|
||||||
|
content-type: "text/(html|css)"
|
||||||
|
- path: "/my-stats.json"
|
||||||
|
checks:
|
||||||
|
# Check that JSON response contains the expected structure
|
||||||
|
- json-contains:
|
||||||
|
- /foo/bar/0
|
||||||
|
- /foo/bar/1
|
||||||
|
- /timestamp
|
||||||
|
# Check that JSON response contains the expected structure and values
|
||||||
|
# It’s VERY important to respect the 4 spaces indentation here!
|
||||||
|
- json-has:
|
||||||
|
/maintenance: false
|
||||||
|
/productname: "Nextcloud"
|
||||||
|
# Check that JSON response contains the expected structure and
|
||||||
|
# that the values matches the provided regexes
|
||||||
|
# You have to double the escape character \
|
||||||
|
- json-like:
|
||||||
|
/productname: ".*cloud"
|
||||||
|
/versionstring: "29\\..*"
|
||||||
|
# Check that JSON response is the exact expected JSON object
|
||||||
|
# The order of the items in the object does not matter.
|
||||||
|
- json-is: '{"foo": "bar", "baz": 42}'
|
||||||
|
- domain: "https://munin.example.org"
|
||||||
|
frequency: "20m"
|
||||||
|
recheck_delay: "5m"
|
||||||
|
# Let’s say it’s an IPv6 only web site
|
||||||
|
ipv4: false
|
||||||
|
paths:
|
||||||
|
- path: "/"
|
||||||
|
checks:
|
||||||
|
- status-is: 301
|
||||||
|
- path: "/munin/"
|
||||||
|
checks:
|
||||||
|
- status-is: 401
|
|
@ -1,15 +1,23 @@
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
|
|
||||||
|
logging.getLogger("passlib").setLevel(logging.ERROR)
|
||||||
|
|
||||||
|
|
||||||
LOG_LEVELS = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
|
LOG_LEVELS = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
|
||||||
|
|
||||||
|
# Print level before message
|
||||||
|
logging.basicConfig(format="%(levelname)-9s %(message)s")
|
||||||
|
|
||||||
# XXX We probably want different loggers for client and server.
|
# XXX We probably want different loggers for client and server.
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
# XXX Does not work ?
|
# XXX Does not work ?
|
||||||
def set_log_level(log_level):
|
def set_log_level(log_level: str, quiet: bool = False):
|
||||||
level = getattr(logging, log_level.upper(), None)
|
level = getattr(logging, log_level.upper(), None)
|
||||||
if not isinstance(level, int):
|
if not isinstance(level, int):
|
||||||
raise ValueError(f"Invalid log level: {log_level}")
|
raise ValueError(f"Invalid log level: {log_level}")
|
||||||
logger.setLevel(level=level)
|
logger.setLevel(level=level)
|
||||||
logger.info("Log level set to %s", log_level)
|
if not quiet:
|
||||||
|
logger.info("Log level set to %s", log_level)
|
||||||
|
|
|
@ -2,29 +2,44 @@
|
||||||
|
|
||||||
For database models, see argos.server.models.
|
For database models, see argos.server.models.
|
||||||
"""
|
"""
|
||||||
from typing import Dict, List, Literal, Optional, Tuple
|
|
||||||
|
|
||||||
|
import json
|
||||||
|
|
||||||
|
from typing import Any, Dict, List, Literal, Tuple
|
||||||
|
|
||||||
|
from durations_nlp import Duration
|
||||||
from pydantic import (
|
from pydantic import (
|
||||||
BaseModel,
|
BaseModel,
|
||||||
ConfigDict,
|
ConfigDict,
|
||||||
HttpUrl,
|
HttpUrl,
|
||||||
|
PostgresDsn,
|
||||||
StrictBool,
|
StrictBool,
|
||||||
EmailStr,
|
EmailStr,
|
||||||
PositiveInt,
|
PositiveInt,
|
||||||
field_validator,
|
field_validator,
|
||||||
)
|
)
|
||||||
from pydantic.functional_validators import BeforeValidator
|
from pydantic.functional_validators import AfterValidator, BeforeValidator
|
||||||
|
from pydantic.networks import UrlConstraints
|
||||||
|
from pydantic_core import Url
|
||||||
from typing_extensions import Annotated
|
from typing_extensions import Annotated
|
||||||
|
|
||||||
from argos.schemas.utils import string_to_duration
|
from argos.schemas.utils import Method
|
||||||
|
|
||||||
Severity = Literal["warning", "error", "critical", "unknown"]
|
Severity = Literal["warning", "error", "critical", "unknown"]
|
||||||
|
Environment = Literal["dev", "test", "production"]
|
||||||
|
Unauthenticated = Literal["dashboard", "all"]
|
||||||
|
SQLiteDsn = Annotated[
|
||||||
|
Url,
|
||||||
|
UrlConstraints(
|
||||||
|
allowed_schemes=["sqlite"],
|
||||||
|
),
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
def parse_threshold(value):
|
def parse_threshold(value):
|
||||||
"""Parse duration threshold for SSL certificate validity"""
|
"""Parse duration threshold for SSL certificate validity"""
|
||||||
for duration_str, severity in value.items():
|
for duration_str, severity in value.items():
|
||||||
days = string_to_duration(duration_str, "days")
|
days = Duration(duration_str).to_days()
|
||||||
# Return here because it's one-item dicts.
|
# Return here because it's one-item dicts.
|
||||||
return (days, severity)
|
return (days, severity)
|
||||||
|
|
||||||
|
@ -33,6 +48,33 @@ class SSL(BaseModel):
|
||||||
thresholds: List[Annotated[Tuple[int, Severity], BeforeValidator(parse_threshold)]]
|
thresholds: List[Annotated[Tuple[int, Severity], BeforeValidator(parse_threshold)]]
|
||||||
|
|
||||||
|
|
||||||
|
class RecurringTasks(BaseModel):
|
||||||
|
max_results_age: float
|
||||||
|
max_lock_seconds: int
|
||||||
|
time_without_agent: int
|
||||||
|
|
||||||
|
@field_validator("max_results_age", mode="before")
|
||||||
|
def parse_max_results_age(cls, value):
|
||||||
|
"""Convert the configured maximum results age to seconds"""
|
||||||
|
return Duration(value).to_seconds()
|
||||||
|
|
||||||
|
@field_validator("max_lock_seconds", mode="before")
|
||||||
|
def parse_max_lock_seconds(cls, value):
|
||||||
|
"""Ensure that max_lock_seconds is higher or equal to agent’s requests timeout (60)"""
|
||||||
|
if value > 60:
|
||||||
|
return value
|
||||||
|
|
||||||
|
return 100
|
||||||
|
|
||||||
|
@field_validator("time_without_agent", mode="before")
|
||||||
|
def parse_time_without_agent(cls, value):
|
||||||
|
"""Ensure that time_without_agent is at least one minute"""
|
||||||
|
if value >= 1:
|
||||||
|
return value
|
||||||
|
|
||||||
|
return 5
|
||||||
|
|
||||||
|
|
||||||
class WebsiteCheck(BaseModel):
|
class WebsiteCheck(BaseModel):
|
||||||
key: str
|
key: str
|
||||||
value: str | List[str] | Dict[str, str]
|
value: str | List[str] | Dict[str, str]
|
||||||
|
@ -66,13 +108,49 @@ def parse_checks(value):
|
||||||
if name not in available_names:
|
if name not in available_names:
|
||||||
msg = f"Check should be one of f{available_names}. ({name} given)"
|
msg = f"Check should be one of f{available_names}. ({name} given)"
|
||||||
raise ValueError(msg)
|
raise ValueError(msg)
|
||||||
if isinstance(expected, int):
|
if name == "http-to-https":
|
||||||
expected = str(expected)
|
if isinstance(expected, int) and expected in range(300, 400):
|
||||||
|
expected = json.dumps({"value": expected})
|
||||||
|
elif isinstance(expected, list):
|
||||||
|
expected = json.dumps({"list": expected})
|
||||||
|
elif (
|
||||||
|
isinstance(expected, dict)
|
||||||
|
and "start" in expected
|
||||||
|
and "stop" in expected
|
||||||
|
):
|
||||||
|
expected = json.dumps({"range": [expected["start"], expected["stop"]]})
|
||||||
|
else:
|
||||||
|
expected = json.dumps({"range": [300, 400]})
|
||||||
|
else:
|
||||||
|
if isinstance(expected, int):
|
||||||
|
expected = str(expected)
|
||||||
|
if isinstance(expected, list):
|
||||||
|
expected = json.dumps(expected)
|
||||||
|
if isinstance(expected, dict):
|
||||||
|
expected = json.dumps(expected)
|
||||||
return (name, expected)
|
return (name, expected)
|
||||||
|
|
||||||
|
|
||||||
|
def parse_request_data(value):
|
||||||
|
"""Turn form or JSON data into JSON string"""
|
||||||
|
|
||||||
|
return json.dumps(
|
||||||
|
{"data": value.data, "json": value.is_json, "headers": value.headers}
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class RequestData(BaseModel):
|
||||||
|
data: Any = None
|
||||||
|
is_json: bool = False
|
||||||
|
headers: Dict[str, str] | None = None
|
||||||
|
|
||||||
|
|
||||||
class WebsitePath(BaseModel):
|
class WebsitePath(BaseModel):
|
||||||
path: str
|
path: str
|
||||||
|
method: Method = "GET"
|
||||||
|
request_data: Annotated[
|
||||||
|
RequestData, AfterValidator(parse_request_data)
|
||||||
|
] | None = None
|
||||||
checks: List[
|
checks: List[
|
||||||
Annotated[
|
Annotated[
|
||||||
Tuple[str, str],
|
Tuple[str, str],
|
||||||
|
@ -83,14 +161,26 @@ class WebsitePath(BaseModel):
|
||||||
|
|
||||||
class Website(BaseModel):
|
class Website(BaseModel):
|
||||||
domain: HttpUrl
|
domain: HttpUrl
|
||||||
frequency: Optional[int] = None
|
ipv4: bool | None = None
|
||||||
|
ipv6: bool | None = None
|
||||||
|
frequency: float | None = None
|
||||||
|
recheck_delay: float | None = None
|
||||||
|
retry_before_notification: int | None = None
|
||||||
paths: List[WebsitePath]
|
paths: List[WebsitePath]
|
||||||
|
|
||||||
@field_validator("frequency", mode="before")
|
@field_validator("frequency", mode="before")
|
||||||
def parse_frequency(cls, value):
|
def parse_frequency(cls, value):
|
||||||
"""Convert the configured frequency to minutes"""
|
"""Convert the configured frequency to minutes"""
|
||||||
if value:
|
if value:
|
||||||
return string_to_duration(value, "minutes")
|
return Duration(value).to_minutes()
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
@field_validator("recheck_delay", mode="before")
|
||||||
|
def parse_recheck_delay(cls, value):
|
||||||
|
"""Convert the configured recheck delay to minutes"""
|
||||||
|
if value:
|
||||||
|
return Duration(value).to_minutes()
|
||||||
|
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
@ -116,7 +206,7 @@ class Mail(BaseModel):
|
||||||
port: PositiveInt = 25
|
port: PositiveInt = 25
|
||||||
ssl: StrictBool = False
|
ssl: StrictBool = False
|
||||||
starttls: StrictBool = False
|
starttls: StrictBool = False
|
||||||
auth: Optional[MailAuth] = None
|
auth: MailAuth | None = None
|
||||||
addresses: List[EmailStr]
|
addresses: List[EmailStr]
|
||||||
|
|
||||||
|
|
||||||
|
@ -127,6 +217,7 @@ class Alert(BaseModel):
|
||||||
warning: List[str]
|
warning: List[str]
|
||||||
critical: List[str]
|
critical: List[str]
|
||||||
unknown: List[str]
|
unknown: List[str]
|
||||||
|
no_agent: List[str]
|
||||||
|
|
||||||
|
|
||||||
class GotifyUrl(BaseModel):
|
class GotifyUrl(BaseModel):
|
||||||
|
@ -134,22 +225,72 @@ class GotifyUrl(BaseModel):
|
||||||
tokens: List[str]
|
tokens: List[str]
|
||||||
|
|
||||||
|
|
||||||
|
class DbSettings(BaseModel):
|
||||||
|
url: PostgresDsn | SQLiteDsn
|
||||||
|
pool_size: int = 10
|
||||||
|
max_overflow: int = 20
|
||||||
|
|
||||||
|
|
||||||
|
class LdapSettings(BaseModel):
|
||||||
|
uri: str
|
||||||
|
user_tree: str
|
||||||
|
bind_dn: str | None = None
|
||||||
|
bind_pwd: str | None = None
|
||||||
|
user_attr: str
|
||||||
|
user_filter: str | None = None
|
||||||
|
|
||||||
|
|
||||||
class General(BaseModel):
|
class General(BaseModel):
|
||||||
"""Frequency for the checks and alerts"""
|
"""Frequency for the checks and alerts"""
|
||||||
|
|
||||||
frequency: int
|
db: DbSettings
|
||||||
|
env: Environment = "production"
|
||||||
|
cookie_secret: str
|
||||||
|
session_duration: int = 10080 # 7 days
|
||||||
|
remember_me_duration: int | None = None
|
||||||
|
unauthenticated_access: Unauthenticated | None = None
|
||||||
|
ldap: LdapSettings | None = None
|
||||||
|
frequency: float
|
||||||
|
recheck_delay: float | None = None
|
||||||
|
retry_before_notification: int = 0
|
||||||
|
ipv4: bool = True
|
||||||
|
ipv6: bool = True
|
||||||
|
root_path: str = ""
|
||||||
alerts: Alert
|
alerts: Alert
|
||||||
mail: Optional[Mail] = None
|
mail: Mail | None = None
|
||||||
gotify: Optional[List[GotifyUrl]] = None
|
gotify: List[GotifyUrl] | None = None
|
||||||
|
apprise: Dict[str, List[str]] | None = None
|
||||||
|
|
||||||
|
@field_validator("session_duration", mode="before")
|
||||||
|
def parse_session_duration(cls, value):
|
||||||
|
"""Convert the configured session duration to minutes"""
|
||||||
|
return Duration(value).to_minutes()
|
||||||
|
|
||||||
|
@field_validator("remember_me_duration", mode="before")
|
||||||
|
def parse_remember_me_duration(cls, value):
|
||||||
|
"""Convert the configured session duration with remember me feature to minutes"""
|
||||||
|
if value:
|
||||||
|
return int(Duration(value).to_minutes())
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
@field_validator("frequency", mode="before")
|
@field_validator("frequency", mode="before")
|
||||||
def parse_frequency(cls, value):
|
def parse_frequency(cls, value):
|
||||||
"""Convert the configured frequency to minutes"""
|
"""Convert the configured frequency to minutes"""
|
||||||
return string_to_duration(value, "minutes")
|
return Duration(value).to_minutes()
|
||||||
|
|
||||||
|
@field_validator("recheck_delay", mode="before")
|
||||||
|
def parse_recheck_delay(cls, value):
|
||||||
|
"""Convert the configured recheck delay to minutes"""
|
||||||
|
if value:
|
||||||
|
return Duration(value).to_minutes()
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
class Config(BaseModel):
|
class Config(BaseModel):
|
||||||
general: General
|
general: General
|
||||||
service: Service
|
service: Service
|
||||||
ssl: SSL
|
ssl: SSL
|
||||||
|
recurring_tasks: RecurringTasks
|
||||||
websites: List[Website]
|
websites: List[Website]
|
||||||
|
|
|
@ -8,17 +8,39 @@ from typing import Literal
|
||||||
|
|
||||||
from pydantic import BaseModel, ConfigDict
|
from pydantic import BaseModel, ConfigDict
|
||||||
|
|
||||||
|
from argos.schemas.utils import IPVersion, Method, Todo
|
||||||
|
|
||||||
# XXX Refactor using SQLModel to avoid duplication of model data
|
# XXX Refactor using SQLModel to avoid duplication of model data
|
||||||
|
|
||||||
|
|
||||||
|
class Job(BaseModel):
|
||||||
|
"""Tasks needing to be executed in recurring tasks processing.
|
||||||
|
It’s quite like a job queue."""
|
||||||
|
|
||||||
|
id: int
|
||||||
|
todo: Todo
|
||||||
|
args: str
|
||||||
|
current: bool
|
||||||
|
added_at: datetime
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return f"Job ({self.id}): {self.todo}"
|
||||||
|
|
||||||
|
|
||||||
class Task(BaseModel):
|
class Task(BaseModel):
|
||||||
"""A task corresponds to a check to execute"""
|
"""A task corresponds to a check to execute"""
|
||||||
|
|
||||||
id: int
|
id: int
|
||||||
url: str
|
url: str
|
||||||
domain: str
|
domain: str
|
||||||
|
ip_version: IPVersion
|
||||||
check: str
|
check: str
|
||||||
|
method: Method
|
||||||
|
request_data: str | None
|
||||||
expected: str
|
expected: str
|
||||||
|
task_group: str
|
||||||
|
retry_before_notification: int
|
||||||
|
contiguous_failures: int
|
||||||
selected_at: datetime | None
|
selected_at: datetime | None
|
||||||
selected_by: str | None
|
selected_by: str | None
|
||||||
|
|
||||||
|
@ -28,7 +50,8 @@ class Task(BaseModel):
|
||||||
task_id = self.id
|
task_id = self.id
|
||||||
url = self.url
|
url = self.url
|
||||||
check = self.check
|
check = self.check
|
||||||
return f"Task ({task_id}): {url} - {check}"
|
ip_version = self.ip_version
|
||||||
|
return f"Task ({task_id}): {url} (IPv{ip_version}) - {check}"
|
||||||
|
|
||||||
|
|
||||||
class SerializableException(BaseModel):
|
class SerializableException(BaseModel):
|
||||||
|
|
|
@ -1,42 +1,10 @@
|
||||||
from typing import Literal, Union
|
from typing import Literal
|
||||||
|
|
||||||
|
|
||||||
def string_to_duration(
|
IPVersion = Literal["4", "6"]
|
||||||
value: str, target: Literal["days", "hours", "minutes"]
|
|
||||||
) -> Union[int, float]:
|
|
||||||
"""Convert a string to a number of hours, days or minutes"""
|
|
||||||
num = int("".join(filter(str.isdigit, value)))
|
|
||||||
|
|
||||||
# It's not possible to convert from a smaller unit to a greater one:
|
Method = Literal[
|
||||||
# - hours and minutes cannot be converted to days
|
"GET", "HEAD", "POST", "OPTIONS", "CONNECT", "TRACE", "PUT", "PATCH", "DELETE"
|
||||||
# - minutes cannot be converted to hours
|
]
|
||||||
if (target == "days" and ("h" in value or "m" in value.replace("mo", ""))) or (
|
|
||||||
target == "hours" and "m" in value.replace("mo", "")
|
|
||||||
):
|
|
||||||
msg = (
|
|
||||||
"Durations cannot be converted from a smaller to a greater unit. "
|
|
||||||
f"(trying to convert '{value}' to {target})"
|
|
||||||
)
|
|
||||||
raise ValueError(msg, value)
|
|
||||||
|
|
||||||
# Consider we're converting to minutes, do the eventual multiplication at the end.
|
Todo = Literal["RELOAD_CONFIG"]
|
||||||
if "h" in value:
|
|
||||||
num = num * 60
|
|
||||||
elif "d" in value:
|
|
||||||
num = num * 60 * 24
|
|
||||||
elif "w" in value:
|
|
||||||
num = num * 60 * 24 * 7
|
|
||||||
elif "mo" in value:
|
|
||||||
num = num * 60 * 24 * 30 # considers 30d in a month
|
|
||||||
elif "y" in value:
|
|
||||||
num = num * 60 * 24 * 365 # considers 365d in a year
|
|
||||||
elif "m" not in value:
|
|
||||||
raise ValueError("Invalid duration value", value)
|
|
||||||
|
|
||||||
if target == "hours":
|
|
||||||
return num / 60
|
|
||||||
if target == "days":
|
|
||||||
return num / 60 / 24
|
|
||||||
|
|
||||||
# target == "minutes"
|
|
||||||
return num
|
|
||||||
|
|
|
@ -1,20 +1,165 @@
|
||||||
import ssl
|
import ssl
|
||||||
import smtplib
|
import smtplib
|
||||||
|
from email.message import EmailMessage
|
||||||
|
|
||||||
from typing import List
|
from typing import List
|
||||||
from urllib.parse import urlparse
|
from urllib.parse import urlparse
|
||||||
|
|
||||||
|
import apprise
|
||||||
import httpx
|
import httpx
|
||||||
|
|
||||||
from argos.checks.base import Severity
|
from argos.checks.base import Severity
|
||||||
from argos.logging import logger
|
from argos.logging import logger
|
||||||
from argos.schemas.config import Config, Mail, GotifyUrl
|
from argos.schemas.config import Config, Mail, GotifyUrl
|
||||||
|
from argos.server.models import Task
|
||||||
# XXX Implement mail alerts https://framagit.org/framasoft/framaspace/argos/-/issues/15
|
|
||||||
# XXX Implement gotify alerts https://framagit.org/framasoft/framaspace/argos/-/issues/16
|
|
||||||
|
|
||||||
|
|
||||||
def handle_alert(config: Config, result, task, severity, old_severity, request):
|
def need_alert(
|
||||||
|
last_severity: str, last_severity_update, severity: str, status: str, task: Task
|
||||||
|
) -> bool:
|
||||||
|
## Create alert… or not!
|
||||||
|
send_notif = False
|
||||||
|
# Severity has changed, and no retry before notification
|
||||||
|
if last_severity != severity and task.retry_before_notification == 0:
|
||||||
|
send_notif = True
|
||||||
|
# Seems to be a first check: create a notification
|
||||||
|
elif last_severity != severity and last_severity_update is None:
|
||||||
|
send_notif = True
|
||||||
|
# As we created a notification, avoid resending it on a
|
||||||
|
# future failure
|
||||||
|
if status != "success":
|
||||||
|
task.contiguous_failures = task.retry_before_notification
|
||||||
|
# We need retry before notification, so the severity may not have changed
|
||||||
|
# since last check
|
||||||
|
elif task.retry_before_notification != 0:
|
||||||
|
# If we got a success, and we already have created a notification:
|
||||||
|
# create notification of success immediately
|
||||||
|
if (
|
||||||
|
status == "success"
|
||||||
|
and task.contiguous_failures >= task.retry_before_notification + 1
|
||||||
|
):
|
||||||
|
send_notif = True
|
||||||
|
task.contiguous_failures = 0
|
||||||
|
# The status is not a success
|
||||||
|
elif status != "success":
|
||||||
|
# This is a new failure
|
||||||
|
task.contiguous_failures += 1
|
||||||
|
# Severity has changed, but not to success, that’s odd:
|
||||||
|
# create a notification
|
||||||
|
if (
|
||||||
|
last_severity not in ("ok", severity)
|
||||||
|
and last_severity_update is not None
|
||||||
|
):
|
||||||
|
send_notif = True
|
||||||
|
# As we created a notification, avoid resending it on a
|
||||||
|
# future failure
|
||||||
|
task.contiguous_failures = task.retry_before_notification
|
||||||
|
# Severity has not changed, but there has been enough failures
|
||||||
|
# to create a notification
|
||||||
|
elif task.contiguous_failures == task.retry_before_notification + 1:
|
||||||
|
send_notif = True
|
||||||
|
|
||||||
|
return send_notif
|
||||||
|
|
||||||
|
|
||||||
|
def get_icon_from_severity(severity: str) -> str:
|
||||||
|
icon = "❌"
|
||||||
|
if severity == Severity.OK:
|
||||||
|
icon = "✅"
|
||||||
|
elif severity == Severity.WARNING:
|
||||||
|
icon = "⚠️"
|
||||||
|
elif severity == Severity.UNKNOWN:
|
||||||
|
icon = "❔"
|
||||||
|
|
||||||
|
return icon
|
||||||
|
|
||||||
|
|
||||||
|
def send_mail(mail: EmailMessage, config: Mail):
|
||||||
|
"""Send message by mail"""
|
||||||
|
|
||||||
|
if config.ssl:
|
||||||
|
logger.debug("Mail notification: SSL")
|
||||||
|
context = ssl.create_default_context()
|
||||||
|
smtp = smtplib.SMTP_SSL(host=config.host, port=config.port, context=context)
|
||||||
|
else:
|
||||||
|
smtp = smtplib.SMTP(
|
||||||
|
host=config.host, # type: ignore
|
||||||
|
port=config.port,
|
||||||
|
)
|
||||||
|
if config.starttls:
|
||||||
|
logger.debug("Mail notification: STARTTLS")
|
||||||
|
context = ssl.create_default_context()
|
||||||
|
smtp.starttls(context=context)
|
||||||
|
|
||||||
|
if config.auth is not None:
|
||||||
|
logger.debug("Mail notification: authentification")
|
||||||
|
smtp.login(config.auth.login, config.auth.password)
|
||||||
|
|
||||||
|
for address in config.addresses:
|
||||||
|
logger.debug("Sending mail to %s", address)
|
||||||
|
logger.debug(mail.get_body())
|
||||||
|
smtp.send_message(mail, to_addrs=address)
|
||||||
|
|
||||||
|
|
||||||
|
def send_gotify_msg(config, payload):
|
||||||
|
"""Send message with gotify"""
|
||||||
|
headers = {"accept": "application/json", "content-type": "application/json"}
|
||||||
|
|
||||||
|
for url in config:
|
||||||
|
logger.debug("Sending gotify message(s) to %s", url.url)
|
||||||
|
for token in url.tokens:
|
||||||
|
try:
|
||||||
|
res = httpx.post(
|
||||||
|
f"{url.url}message",
|
||||||
|
params={"token": token},
|
||||||
|
headers=headers,
|
||||||
|
json=payload,
|
||||||
|
)
|
||||||
|
res.raise_for_status()
|
||||||
|
except httpx.RequestError as err:
|
||||||
|
logger.error(
|
||||||
|
"An error occurred while sending a message to %s with token %s",
|
||||||
|
err.request.url,
|
||||||
|
token,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def no_agent_alert(config: Config):
|
||||||
|
"""Alert"""
|
||||||
|
msg = "You should check what’s going on with your Argos agents."
|
||||||
|
twa = config.recurring_tasks.time_without_agent
|
||||||
|
if twa > 1:
|
||||||
|
subject = f"No agent has been seen within the last {twa} minutes"
|
||||||
|
else:
|
||||||
|
subject = "No agent has been seen within the last minute"
|
||||||
|
|
||||||
|
if "local" in config.general.alerts.no_agent:
|
||||||
|
logger.error(subject)
|
||||||
|
|
||||||
|
if config.general.mail is not None and "mail" in config.general.alerts.no_agent:
|
||||||
|
mail = EmailMessage()
|
||||||
|
mail["Subject"] = f"[Argos] {subject}"
|
||||||
|
mail["From"] = config.general.mail.mailfrom
|
||||||
|
mail.set_content(msg)
|
||||||
|
send_mail(mail, config.general.mail)
|
||||||
|
|
||||||
|
if config.general.gotify is not None and "gotify" in config.general.alerts.no_agent:
|
||||||
|
priority = 9
|
||||||
|
payload = {"title": subject, "message": msg, "priority": priority}
|
||||||
|
send_gotify_msg(config.general.gotify, payload)
|
||||||
|
|
||||||
|
if config.general.apprise is not None:
|
||||||
|
for notif_way in config.general.alerts.no_agent:
|
||||||
|
if notif_way.startswith("apprise:"):
|
||||||
|
group = notif_way[8:]
|
||||||
|
apobj = apprise.Apprise()
|
||||||
|
for channel in config.general.apprise[group]:
|
||||||
|
apobj.add(channel)
|
||||||
|
|
||||||
|
apobj.notify(title=subject, body=msg)
|
||||||
|
|
||||||
|
|
||||||
|
def handle_alert(config: Config, result, task, severity, old_severity, request): # pylint: disable-msg=too-many-positional-arguments
|
||||||
"""Dispatch alert through configured alert channels"""
|
"""Dispatch alert through configured alert channels"""
|
||||||
|
|
||||||
if "local" in getattr(config.general.alerts, severity):
|
if "local" in getattr(config.general.alerts, severity):
|
||||||
|
@ -39,93 +184,110 @@ def handle_alert(config: Config, result, task, severity, old_severity, request):
|
||||||
result, task, severity, old_severity, config.general.gotify, request
|
result, task, severity, old_severity, config.general.gotify, request
|
||||||
)
|
)
|
||||||
|
|
||||||
|
if config.general.apprise is not None:
|
||||||
|
for notif_way in getattr(config.general.alerts, severity):
|
||||||
|
if notif_way.startswith("apprise:"):
|
||||||
|
group = notif_way[8:]
|
||||||
|
notify_with_apprise(
|
||||||
|
result,
|
||||||
|
task,
|
||||||
|
severity,
|
||||||
|
old_severity,
|
||||||
|
config.general.apprise[group],
|
||||||
|
request,
|
||||||
|
)
|
||||||
|
|
||||||
def notify_by_mail(
|
|
||||||
|
def notify_with_apprise( # pylint: disable-msg=too-many-positional-arguments
|
||||||
|
result, task, severity: str, old_severity: str, group: List[str], request
|
||||||
|
) -> None:
|
||||||
|
logger.debug("Will send apprise notification")
|
||||||
|
|
||||||
|
apobj = apprise.Apprise()
|
||||||
|
for channel in group:
|
||||||
|
apobj.add(channel)
|
||||||
|
|
||||||
|
icon = get_icon_from_severity(severity)
|
||||||
|
title = f"[Argos] {icon} {urlparse(task.url).netloc} (IPv{task.ip_version}): status {severity}"
|
||||||
|
msg = f"""\
|
||||||
|
URL: {task.url} (IPv{task.ip_version})
|
||||||
|
Check: {task.check}
|
||||||
|
Status: {severity}
|
||||||
|
Time: {result.submitted_at}
|
||||||
|
Previous status: {old_severity}
|
||||||
|
|
||||||
|
See result on {request.url_for('get_result_view', result_id=result.id)}
|
||||||
|
|
||||||
|
See results of task on {request.url_for('get_task_results_view', task_id=task.id)}#{result.id}
|
||||||
|
"""
|
||||||
|
|
||||||
|
apobj.notify(title=title, body=msg)
|
||||||
|
|
||||||
|
|
||||||
|
def notify_by_mail( # pylint: disable-msg=too-many-positional-arguments
|
||||||
result, task, severity: str, old_severity: str, config: Mail, request
|
result, task, severity: str, old_severity: str, config: Mail, request
|
||||||
) -> None:
|
) -> None:
|
||||||
logger.debug("Will send mail notification")
|
logger.debug("Will send mail notification")
|
||||||
|
|
||||||
|
icon = get_icon_from_severity(severity)
|
||||||
msg = f"""\
|
msg = f"""\
|
||||||
URL: {task.url}
|
URL: {task.url} (IPv{task.ip_version})
|
||||||
Check: {task.check}
|
Check: {task.check}
|
||||||
Status: {severity}
|
Status: {severity}
|
||||||
Time: {result.submitted_at}
|
Time: {result.submitted_at}
|
||||||
Previous status: {old_severity}
|
Previous status: {old_severity}
|
||||||
|
|
||||||
See results of task on {request.url_for('get_task_results_view', task_id=task.id)}
|
See result on {request.url_for('get_result_view', result_id=result.id)}
|
||||||
|
|
||||||
|
See results of task on {request.url_for('get_task_results_view', task_id=task.id)}#{result.id}
|
||||||
"""
|
"""
|
||||||
|
|
||||||
mail = f"""\
|
mail = EmailMessage()
|
||||||
Subject: [Argos] {urlparse(task.url).netloc}: status {severity}
|
mail[
|
||||||
|
"Subject"
|
||||||
{msg}"""
|
] = f"[Argos] {icon} {urlparse(task.url).netloc} (IPv{task.ip_version}): status {severity}"
|
||||||
|
mail["From"] = config.mailfrom
|
||||||
if config.ssl:
|
mail.set_content(msg)
|
||||||
logger.debug("Mail notification: SSL")
|
send_mail(mail, config)
|
||||||
context = ssl.create_default_context()
|
|
||||||
smtp = smtplib.SMTP_SSL(host=config.host, port=config.port, context=context)
|
|
||||||
else:
|
|
||||||
smtp = smtplib.SMTP(
|
|
||||||
host=config.host, # type: ignore
|
|
||||||
port=config.port,
|
|
||||||
)
|
|
||||||
if config.starttls:
|
|
||||||
logger.debug("Mail notification: STARTTLS")
|
|
||||||
context = ssl.create_default_context()
|
|
||||||
smtp.starttls(context=context)
|
|
||||||
|
|
||||||
if config.auth is not None:
|
|
||||||
logger.debug("Mail notification: authentification")
|
|
||||||
smtp.login(config.auth.login, config.auth.password)
|
|
||||||
|
|
||||||
for address in config.addresses:
|
|
||||||
logger.debug("Sending mail to %s", address)
|
|
||||||
logger.debug(msg)
|
|
||||||
smtp.sendmail(config.mailfrom, address, mail)
|
|
||||||
|
|
||||||
|
|
||||||
def notify_with_gotify(
|
def notify_with_gotify( # pylint: disable-msg=too-many-positional-arguments
|
||||||
result, task, severity: str, old_severity: str, config: List[GotifyUrl], request
|
result, task, severity: str, old_severity: str, config: List[GotifyUrl], request
|
||||||
) -> None:
|
) -> None:
|
||||||
logger.debug("Will send gotify notification")
|
logger.debug("Will send gotify notification")
|
||||||
headers = {"accept": "application/json", "content-type": "application/json"}
|
|
||||||
|
|
||||||
|
icon = get_icon_from_severity(severity)
|
||||||
priority = 9
|
priority = 9
|
||||||
icon = "❌"
|
|
||||||
if severity == Severity.OK:
|
if severity == Severity.OK:
|
||||||
priority = 1
|
priority = 1
|
||||||
icon = "✅"
|
|
||||||
elif severity == Severity.WARNING:
|
elif severity == Severity.WARNING:
|
||||||
priority = 5
|
priority = 5
|
||||||
icon = "⚠️"
|
elif severity == Severity.UNKNOWN:
|
||||||
|
priority = 5
|
||||||
|
|
||||||
subject = f"{icon} {urlparse(task.url).netloc}: status {severity}"
|
subject = (
|
||||||
|
f"{icon} {urlparse(task.url).netloc} (IPv{task.ip_version}): status {severity}"
|
||||||
|
)
|
||||||
msg = f"""\
|
msg = f"""\
|
||||||
URL: {task.url}
|
URL: <{task.url}> (IPv{task.ip_version})\\
|
||||||
Check: {task.check}
|
Check: {task.check}\\
|
||||||
Status: {severity}
|
Status: {severity}\\
|
||||||
Time: {result.submitted_at}
|
Time: {result.submitted_at}\\
|
||||||
Previous status: {old_severity}
|
Previous status: {old_severity}\\
|
||||||
|
\\
|
||||||
See results of task on {request.url_for('get_task_results_view', task_id=task.id)}
|
See result on <{request.url_for('get_result_view', result_id=result.id)}>\\
|
||||||
|
\\
|
||||||
|
See results of task on <{request.url_for('get_task_results_view', task_id=task.id)}#{result.id}>
|
||||||
"""
|
"""
|
||||||
|
extras = {
|
||||||
|
"client::display": {"contentType": "text/markdown"},
|
||||||
|
"client::notification": {
|
||||||
|
"click": {
|
||||||
|
"url": f"{request.url_for('get_result_view', result_id=result.id)}"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
payload = {"title": subject, "message": msg, "priority": priority}
|
payload = {"title": subject, "message": msg, "priority": priority, "extras": extras}
|
||||||
|
|
||||||
for url in config:
|
send_gotify_msg(config, payload)
|
||||||
logger.debug("Sending gotify message(s) to %s", url)
|
|
||||||
for token in url.tokens:
|
|
||||||
try:
|
|
||||||
res = httpx.post(
|
|
||||||
f"{url.url}message",
|
|
||||||
params={"token": token},
|
|
||||||
headers=headers,
|
|
||||||
json=payload,
|
|
||||||
)
|
|
||||||
res.raise_for_status()
|
|
||||||
except httpx.RequestError as err:
|
|
||||||
logger.error(
|
|
||||||
"An error occurred while sending a message to %s with token %s",
|
|
||||||
err.request.url,
|
|
||||||
token,
|
|
||||||
)
|
|
||||||
|
|
16
argos/server/exceptions.py
Normal file
16
argos/server/exceptions.py
Normal file
|
@ -0,0 +1,16 @@
|
||||||
|
from fastapi import Request
|
||||||
|
from fastapi.responses import RedirectResponse
|
||||||
|
|
||||||
|
|
||||||
|
class NotAuthenticatedException(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def auth_exception_handler(request: Request, exc: NotAuthenticatedException):
|
||||||
|
"""
|
||||||
|
Redirect the user to the login page if not logged in
|
||||||
|
"""
|
||||||
|
response = RedirectResponse(url=request.url_for("login_view"))
|
||||||
|
manager = request.app.state.manager
|
||||||
|
manager.set_cookie(response, "")
|
||||||
|
return response
|
|
@ -1,107 +1,96 @@
|
||||||
import os
|
import os
|
||||||
import sys
|
from contextlib import asynccontextmanager
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
from fastapi import FastAPI
|
from fastapi import FastAPI
|
||||||
from fastapi.staticfiles import StaticFiles
|
from fastapi.staticfiles import StaticFiles
|
||||||
from pydantic import ValidationError
|
from fastapi_login import LoginManager
|
||||||
|
from fastapi_utils.tasks import repeat_every
|
||||||
|
from psutil import Process
|
||||||
from sqlalchemy import create_engine, event
|
from sqlalchemy import create_engine, event
|
||||||
from sqlalchemy.orm import sessionmaker
|
from sqlalchemy.orm import sessionmaker
|
||||||
|
|
||||||
from argos.logging import logger
|
from argos.logging import logger, set_log_level
|
||||||
from argos.server import models, routes
|
from argos.server import models, routes, queries
|
||||||
from argos.server.settings import get_app_settings, read_yaml_config
|
from argos.server.alerting import no_agent_alert
|
||||||
|
from argos.server.exceptions import NotAuthenticatedException, auth_exception_handler
|
||||||
|
from argos.server.settings import read_config
|
||||||
|
|
||||||
|
|
||||||
def get_application() -> FastAPI:
|
def get_application() -> FastAPI:
|
||||||
"""Spawn Argos FastAPI server"""
|
"""Spawn Argos FastAPI server"""
|
||||||
settings = get_app_settings()
|
config_file = os.environ["ARGOS_YAML_FILE"]
|
||||||
appli = FastAPI()
|
config = read_config(config_file)
|
||||||
|
|
||||||
config = read_config(appli, settings)
|
root_path = config.general.root_path
|
||||||
|
|
||||||
|
if root_path != "":
|
||||||
|
logger.info("Root path for Argos: %s", root_path)
|
||||||
|
if root_path.endswith("/"):
|
||||||
|
root_path = root_path[:-1]
|
||||||
|
logger.info("Fixed root path for Argos: %s", root_path)
|
||||||
|
|
||||||
|
appli = FastAPI(lifespan=lifespan, root_path=root_path)
|
||||||
|
|
||||||
# Settings is the pydantic settings object
|
|
||||||
# Config is the argos config object (built from yaml)
|
# Config is the argos config object (built from yaml)
|
||||||
appli.state.config = config
|
appli.state.config = config
|
||||||
appli.state.settings = settings
|
appli.add_exception_handler(NotAuthenticatedException, auth_exception_handler)
|
||||||
|
appli.state.manager = create_manager(config.general.cookie_secret)
|
||||||
|
|
||||||
|
if config.general.ldap is not None:
|
||||||
|
import ldap
|
||||||
|
|
||||||
|
appli.state.ldap = ldap.initialize(config.general.ldap.uri)
|
||||||
|
|
||||||
|
@appli.state.manager.user_loader()
|
||||||
|
async def query_user(user: str) -> None | str | models.User:
|
||||||
|
"""
|
||||||
|
Get a user from the db or LDAP
|
||||||
|
:param user: name of the user
|
||||||
|
:return: None or the user object
|
||||||
|
"""
|
||||||
|
if appli.state.config.general.ldap is not None:
|
||||||
|
from argos.server.routes.dependencies import find_ldap_user
|
||||||
|
|
||||||
|
return await find_ldap_user(appli.state.config, appli.state.ldap, user)
|
||||||
|
|
||||||
|
return await queries.get_user(appli.state.db, user)
|
||||||
|
|
||||||
appli.add_event_handler(
|
|
||||||
"startup",
|
|
||||||
create_start_app_handler(appli),
|
|
||||||
)
|
|
||||||
appli.add_event_handler(
|
|
||||||
"shutdown",
|
|
||||||
create_stop_app_handler(appli),
|
|
||||||
)
|
|
||||||
appli.include_router(routes.api, prefix="/api")
|
appli.include_router(routes.api, prefix="/api")
|
||||||
appli.include_router(routes.views)
|
appli.include_router(routes.views)
|
||||||
|
|
||||||
static_dir = os.path.join(os.path.dirname(__file__), "static")
|
static_dir = Path(__file__).resolve().parent / "static"
|
||||||
|
|
||||||
appli.mount("/static", StaticFiles(directory=static_dir), name="static")
|
appli.mount("/static", StaticFiles(directory=static_dir), name="static")
|
||||||
return appli
|
return appli
|
||||||
|
|
||||||
|
|
||||||
def create_start_app_handler(appli):
|
|
||||||
"""Warmup the server:
|
|
||||||
setup database connection
|
|
||||||
"""
|
|
||||||
|
|
||||||
async def _get_db():
|
|
||||||
setup_database(appli)
|
|
||||||
|
|
||||||
return await connect_to_db(appli)
|
|
||||||
|
|
||||||
return _get_db
|
|
||||||
|
|
||||||
|
|
||||||
async def connect_to_db(appli):
|
async def connect_to_db(appli):
|
||||||
appli.state.db = appli.state.SessionLocal()
|
appli.state.db = appli.state.SessionLocal()
|
||||||
return appli.state.db
|
return appli.state.db
|
||||||
|
|
||||||
|
|
||||||
def create_stop_app_handler(appli):
|
|
||||||
"""Gracefully shutdown the server:
|
|
||||||
close database connection.
|
|
||||||
"""
|
|
||||||
|
|
||||||
async def stop_app():
|
|
||||||
appli.state.db.close()
|
|
||||||
|
|
||||||
return stop_app
|
|
||||||
|
|
||||||
|
|
||||||
def read_config(appli, settings):
|
|
||||||
try:
|
|
||||||
config = read_yaml_config(settings.yaml_file)
|
|
||||||
appli.state.config = config
|
|
||||||
return config
|
|
||||||
except ValidationError as err:
|
|
||||||
logger.error("Errors where found while reading configuration:")
|
|
||||||
for error in err.errors():
|
|
||||||
logger.error("%s is %s", error["loc"], error["type"])
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
|
|
||||||
def setup_database(appli):
|
def setup_database(appli):
|
||||||
settings = appli.state.settings
|
config = appli.state.config
|
||||||
|
db_url = str(config.general.db.url)
|
||||||
|
logger.debug("Using database URL %s", db_url)
|
||||||
# For sqlite, we need to add connect_args={"check_same_thread": False}
|
# For sqlite, we need to add connect_args={"check_same_thread": False}
|
||||||
logger.debug("Using database URL %s", settings.database_url)
|
if config.general.env == "production" and db_url.startswith("sqlite:////tmp"):
|
||||||
if settings.database_url.startswith("sqlite:////tmp"):
|
|
||||||
logger.warning("Using sqlite in /tmp is not recommended for production")
|
logger.warning("Using sqlite in /tmp is not recommended for production")
|
||||||
|
|
||||||
extra_settings = {}
|
extra_settings = {}
|
||||||
if settings.db_pool_size:
|
if config.general.db.pool_size:
|
||||||
extra_settings.setdefault("pool_size", settings.db_pool_size)
|
extra_settings.setdefault("pool_size", config.general.db.pool_size)
|
||||||
|
|
||||||
if settings.db_max_overflow:
|
if config.general.db.max_overflow:
|
||||||
extra_settings.setdefault("max_overflow", settings.db_max_overflow)
|
extra_settings.setdefault("max_overflow", config.general.db.max_overflow)
|
||||||
|
|
||||||
engine = create_engine(settings.database_url, **extra_settings)
|
engine = create_engine(db_url, **extra_settings)
|
||||||
|
|
||||||
def _fk_pragma_on_connect(dbapi_con, con_record):
|
def _fk_pragma_on_connect(dbapi_con, con_record):
|
||||||
dbapi_con.execute("pragma foreign_keys=ON")
|
dbapi_con.execute("pragma foreign_keys=ON")
|
||||||
|
|
||||||
if settings.database_url.startswith("sqlite:////"):
|
if db_url.startswith("sqlite:///"):
|
||||||
event.listen(engine, "connect", _fk_pragma_on_connect)
|
event.listen(engine, "connect", _fk_pragma_on_connect)
|
||||||
|
|
||||||
appli.state.SessionLocal = sessionmaker(
|
appli.state.SessionLocal = sessionmaker(
|
||||||
|
@ -111,4 +100,80 @@ def setup_database(appli):
|
||||||
models.Base.metadata.create_all(bind=engine)
|
models.Base.metadata.create_all(bind=engine)
|
||||||
|
|
||||||
|
|
||||||
|
def create_manager(cookie_secret: str) -> LoginManager:
|
||||||
|
if cookie_secret == "foo_bar_baz":
|
||||||
|
logger.warning(
|
||||||
|
"You should change the cookie_secret secret in your configuration file."
|
||||||
|
)
|
||||||
|
return LoginManager(
|
||||||
|
cookie_secret,
|
||||||
|
"/login",
|
||||||
|
use_cookie=True,
|
||||||
|
use_header=False,
|
||||||
|
not_authenticated_exception=NotAuthenticatedException,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@repeat_every(seconds=120, logger=logger)
|
||||||
|
async def recurring_tasks() -> None:
|
||||||
|
"""Recurring DB cleanup and watch-agents tasks"""
|
||||||
|
# If we are using gunicorn
|
||||||
|
if not hasattr(app.state, "SessionLocal"):
|
||||||
|
parent_process = Process(os.getppid())
|
||||||
|
children = parent_process.children(recursive=True)
|
||||||
|
# Start the task only once, not for every worker
|
||||||
|
if children[0].pid == os.getpid():
|
||||||
|
# and we need to setup database engine
|
||||||
|
setup_database(app)
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
set_log_level("info", quiet=True)
|
||||||
|
logger.info("Start background recurring tasks")
|
||||||
|
|
||||||
|
with app.state.SessionLocal() as db:
|
||||||
|
config = app.state.config.recurring_tasks
|
||||||
|
|
||||||
|
agents = await queries.get_recent_agents_count(db, config.time_without_agent)
|
||||||
|
if agents == 0:
|
||||||
|
no_agent_alert(app.state.config)
|
||||||
|
logger.info("Agent presence checked")
|
||||||
|
|
||||||
|
removed = await queries.remove_old_results(db, config.max_results_age)
|
||||||
|
logger.info("%i result(s) removed", removed)
|
||||||
|
|
||||||
|
updated = await queries.release_old_locks(db, config.max_lock_seconds)
|
||||||
|
logger.info("%i lock(s) released", updated)
|
||||||
|
|
||||||
|
processed_jobs = await queries.process_jobs(db)
|
||||||
|
logger.info("%i job(s) processed", processed_jobs)
|
||||||
|
|
||||||
|
logger.info("Background recurring tasks ended")
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
@asynccontextmanager
|
||||||
|
async def lifespan(appli: FastAPI):
|
||||||
|
"""Server start and stop actions
|
||||||
|
|
||||||
|
Setup database connection then close it at shutdown.
|
||||||
|
"""
|
||||||
|
setup_database(appli)
|
||||||
|
|
||||||
|
db = await connect_to_db(appli)
|
||||||
|
|
||||||
|
tasks_count = await queries.count_tasks(db)
|
||||||
|
if tasks_count == 0:
|
||||||
|
logger.warning(
|
||||||
|
"There is no tasks in the database. "
|
||||||
|
'Please launch the command "argos server reload-config"'
|
||||||
|
)
|
||||||
|
await recurring_tasks()
|
||||||
|
|
||||||
|
yield
|
||||||
|
|
||||||
|
appli.state.db.close()
|
||||||
|
|
||||||
|
|
||||||
app = get_application()
|
app = get_application()
|
||||||
|
|
|
@ -0,0 +1,37 @@
|
||||||
|
"""Add recheck delay
|
||||||
|
|
||||||
|
Revision ID: 127d74c770bb
|
||||||
|
Revises: dcf73fa19fce
|
||||||
|
Create Date: 2024-11-27 16:04:58.138768
|
||||||
|
|
||||||
|
"""
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "127d74c770bb"
|
||||||
|
down_revision: Union[str, None] = "dcf73fa19fce"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
with op.batch_alter_table("tasks", schema=None) as batch_op:
|
||||||
|
batch_op.add_column(sa.Column("recheck_delay", sa.Float(), nullable=True))
|
||||||
|
batch_op.add_column(
|
||||||
|
sa.Column(
|
||||||
|
"already_retried",
|
||||||
|
sa.Boolean(),
|
||||||
|
nullable=False,
|
||||||
|
server_default=sa.sql.false(),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
with op.batch_alter_table("tasks", schema=None) as batch_op:
|
||||||
|
batch_op.drop_column("already_retried")
|
||||||
|
batch_op.drop_column("recheck_delay")
|
|
@ -0,0 +1,28 @@
|
||||||
|
"""Add request data to tasks
|
||||||
|
|
||||||
|
Revision ID: 31255a412d63
|
||||||
|
Revises: 80a29f64f91c
|
||||||
|
Create Date: 2024-12-09 16:40:20.926138
|
||||||
|
|
||||||
|
"""
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "31255a412d63"
|
||||||
|
down_revision: Union[str, None] = "80a29f64f91c"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
with op.batch_alter_table("tasks", schema=None) as batch_op:
|
||||||
|
batch_op.add_column(sa.Column("request_data", sa.String(), nullable=True))
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
with op.batch_alter_table("tasks", schema=None) as batch_op:
|
||||||
|
batch_op.drop_column("request_data")
|
|
@ -0,0 +1,36 @@
|
||||||
|
"""Add job queue
|
||||||
|
|
||||||
|
Revision ID: 5f6cb30db996
|
||||||
|
Revises: bd4b4962696a
|
||||||
|
Create Date: 2025-02-17 16:56:36.673511
|
||||||
|
|
||||||
|
"""
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "5f6cb30db996"
|
||||||
|
down_revision: Union[str, None] = "bd4b4962696a"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
op.create_table(
|
||||||
|
"jobs",
|
||||||
|
sa.Column("id", sa.Integer(), nullable=False),
|
||||||
|
sa.Column("todo", sa.Enum("RELOAD_CONFIG", name="todo_enum"), nullable=False),
|
||||||
|
sa.Column("args", sa.String(), nullable=False),
|
||||||
|
sa.Column(
|
||||||
|
"current", sa.Boolean(), server_default=sa.sql.false(), nullable=False
|
||||||
|
),
|
||||||
|
sa.Column("added_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.PrimaryKeyConstraint("id"),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
op.drop_table("jobs")
|
|
@ -0,0 +1,34 @@
|
||||||
|
"""Add IP version to checks
|
||||||
|
|
||||||
|
Revision ID: 64f73a79b7d8
|
||||||
|
Revises: a1e98cf72a5c
|
||||||
|
Create Date: 2024-12-02 14:12:40.558033
|
||||||
|
|
||||||
|
"""
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
from sqlalchemy.dialects.postgresql import ENUM
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "64f73a79b7d8"
|
||||||
|
down_revision: Union[str, None] = "a1e98cf72a5c"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
enum = ENUM("4", "6", name="ip_version_enum", create_type=False)
|
||||||
|
enum.create(op.get_bind(), checkfirst=True)
|
||||||
|
with op.batch_alter_table("tasks", schema=None) as batch_op:
|
||||||
|
batch_op.add_column(
|
||||||
|
sa.Column("ip_version", enum, server_default="4", nullable=False)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
with op.batch_alter_table("tasks", schema=None) as batch_op:
|
||||||
|
batch_op.drop_column("ip_version")
|
||||||
|
ENUM(name="ip_version_enum").drop(op.get_bind(), checkfirst=True)
|
|
@ -0,0 +1,41 @@
|
||||||
|
"""Add retries before notification feature
|
||||||
|
|
||||||
|
Revision ID: 80a29f64f91c
|
||||||
|
Revises: 8b58ced14d6e
|
||||||
|
Create Date: 2024-12-04 17:03:35.104368
|
||||||
|
|
||||||
|
"""
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "80a29f64f91c"
|
||||||
|
down_revision: Union[str, None] = "8b58ced14d6e"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
with op.batch_alter_table("tasks", schema=None) as batch_op:
|
||||||
|
batch_op.add_column(
|
||||||
|
sa.Column(
|
||||||
|
"retry_before_notification",
|
||||||
|
sa.Integer(),
|
||||||
|
server_default="0",
|
||||||
|
nullable=False,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
batch_op.add_column(
|
||||||
|
sa.Column(
|
||||||
|
"contiguous_failures", sa.Integer(), server_default="0", nullable=False
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
with op.batch_alter_table("tasks", schema=None) as batch_op:
|
||||||
|
batch_op.drop_column("contiguous_failures")
|
||||||
|
batch_op.drop_column("retry_before_notification")
|
|
@ -0,0 +1,35 @@
|
||||||
|
"""Add task index
|
||||||
|
|
||||||
|
Revision ID: 8b58ced14d6e
|
||||||
|
Revises: 64f73a79b7d8
|
||||||
|
Create Date: 2024-12-03 16:41:44.842213
|
||||||
|
|
||||||
|
"""
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "8b58ced14d6e"
|
||||||
|
down_revision: Union[str, None] = "64f73a79b7d8"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
with op.batch_alter_table("tasks", schema=None) as batch_op:
|
||||||
|
batch_op.add_column(sa.Column("task_group", sa.String(), nullable=True))
|
||||||
|
with op.batch_alter_table("tasks", schema=None) as batch_op:
|
||||||
|
batch_op.execute(
|
||||||
|
"UPDATE tasks SET task_group = method || '-' || ip_version || '-' || url"
|
||||||
|
)
|
||||||
|
batch_op.alter_column("task_group", nullable=False)
|
||||||
|
batch_op.create_index("similar_tasks", ["task_group"], unique=False)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
with op.batch_alter_table("tasks", schema=None) as batch_op:
|
||||||
|
batch_op.drop_index("similar_tasks")
|
||||||
|
batch_op.drop_column("task_group")
|
|
@ -0,0 +1,38 @@
|
||||||
|
"""Make frequency a float
|
||||||
|
|
||||||
|
Revision ID: a1e98cf72a5c
|
||||||
|
Revises: 127d74c770bb
|
||||||
|
Create Date: 2024-11-27 16:10:13.000705
|
||||||
|
|
||||||
|
"""
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "a1e98cf72a5c"
|
||||||
|
down_revision: Union[str, None] = "127d74c770bb"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
with op.batch_alter_table("tasks", schema=None) as batch_op:
|
||||||
|
batch_op.alter_column(
|
||||||
|
"frequency",
|
||||||
|
existing_type=sa.INTEGER(),
|
||||||
|
type_=sa.Float(),
|
||||||
|
existing_nullable=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
with op.batch_alter_table("tasks", schema=None) as batch_op:
|
||||||
|
batch_op.alter_column(
|
||||||
|
"frequency",
|
||||||
|
existing_type=sa.Float(),
|
||||||
|
type_=sa.INTEGER(),
|
||||||
|
existing_nullable=False,
|
||||||
|
)
|
|
@ -0,0 +1,42 @@
|
||||||
|
"""Use bigint for results id field
|
||||||
|
|
||||||
|
Revision ID: bd4b4962696a
|
||||||
|
Revises: 31255a412d63
|
||||||
|
Create Date: 2025-01-06 11:44:37.552965
|
||||||
|
|
||||||
|
"""
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "bd4b4962696a"
|
||||||
|
down_revision: Union[str, None] = "31255a412d63"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
bind = op.get_bind()
|
||||||
|
if bind.engine.name != "sqlite":
|
||||||
|
with op.batch_alter_table("results", schema=None) as batch_op:
|
||||||
|
batch_op.alter_column(
|
||||||
|
"id",
|
||||||
|
existing_type=sa.INTEGER(),
|
||||||
|
type_=sa.BigInteger(),
|
||||||
|
existing_nullable=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
bind = op.get_bind()
|
||||||
|
if bind.engine.name != "sqlite":
|
||||||
|
with op.batch_alter_table("results", schema=None) as batch_op:
|
||||||
|
batch_op.alter_column(
|
||||||
|
"id",
|
||||||
|
existing_type=sa.BigInteger(),
|
||||||
|
type_=sa.INTEGER(),
|
||||||
|
existing_nullable=False,
|
||||||
|
)
|
|
@ -0,0 +1,35 @@
|
||||||
|
"""Add users table
|
||||||
|
|
||||||
|
Revision ID: c780864dc407
|
||||||
|
Revises: defda3f2952d
|
||||||
|
Create Date: 2024-06-10 16:31:17.296983
|
||||||
|
|
||||||
|
"""
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "c780864dc407"
|
||||||
|
down_revision: Union[str, None] = "defda3f2952d"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
op.create_table(
|
||||||
|
"users",
|
||||||
|
sa.Column("username", sa.String(), nullable=False),
|
||||||
|
sa.Column("password", sa.String(), nullable=False),
|
||||||
|
sa.Column("disabled", sa.Boolean(), nullable=False),
|
||||||
|
sa.Column("created_at", sa.DateTime(), nullable=False),
|
||||||
|
sa.Column("updated_at", sa.DateTime(), nullable=True),
|
||||||
|
sa.Column("last_login_at", sa.DateTime(), nullable=True),
|
||||||
|
sa.PrimaryKeyConstraint("username"),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
op.drop_table("users")
|
|
@ -0,0 +1,51 @@
|
||||||
|
"""Specify check method
|
||||||
|
|
||||||
|
Revision ID: dcf73fa19fce
|
||||||
|
Revises: c780864dc407
|
||||||
|
Create Date: 2024-11-26 14:40:27.510587
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Sequence, Union
|
||||||
|
|
||||||
|
from alembic import op
|
||||||
|
import sqlalchemy as sa
|
||||||
|
|
||||||
|
|
||||||
|
# revision identifiers, used by Alembic.
|
||||||
|
revision: str = "dcf73fa19fce"
|
||||||
|
down_revision: Union[str, None] = "c780864dc407"
|
||||||
|
branch_labels: Union[str, Sequence[str], None] = None
|
||||||
|
depends_on: Union[str, Sequence[str], None] = None
|
||||||
|
|
||||||
|
|
||||||
|
def upgrade() -> None:
|
||||||
|
enum = sa.Enum(
|
||||||
|
"GET",
|
||||||
|
"HEAD",
|
||||||
|
"POST",
|
||||||
|
"OPTIONS",
|
||||||
|
"CONNECT",
|
||||||
|
"TRACE",
|
||||||
|
"PUT",
|
||||||
|
"PATCH",
|
||||||
|
"DELETE",
|
||||||
|
name="method",
|
||||||
|
create_type=False,
|
||||||
|
)
|
||||||
|
enum.create(op.get_bind(), checkfirst=True)
|
||||||
|
with op.batch_alter_table("tasks", schema=None) as batch_op:
|
||||||
|
batch_op.add_column(
|
||||||
|
sa.Column(
|
||||||
|
"method",
|
||||||
|
enum,
|
||||||
|
nullable=False,
|
||||||
|
server_default="GET",
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def downgrade() -> None:
|
||||||
|
with op.batch_alter_table("tasks", schema=None) as batch_op:
|
||||||
|
batch_op.drop_column("method")
|
||||||
|
sa.Enum(name="method").drop(op.get_bind(), checkfirst=True)
|
|
@ -1,6 +1,7 @@
|
||||||
"""Database models"""
|
"""Database models"""
|
||||||
|
|
||||||
from datetime import datetime, timedelta
|
from datetime import datetime, timedelta
|
||||||
|
from hashlib import md5
|
||||||
from typing import List, Literal
|
from typing import List, Literal
|
||||||
|
|
||||||
from sqlalchemy import (
|
from sqlalchemy import (
|
||||||
|
@ -9,15 +10,42 @@ from sqlalchemy import (
|
||||||
ForeignKey,
|
ForeignKey,
|
||||||
)
|
)
|
||||||
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship
|
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship
|
||||||
|
from sqlalchemy.schema import Index
|
||||||
|
|
||||||
from argos.checks import BaseCheck, get_registered_check
|
from argos.checks import BaseCheck, get_registered_check
|
||||||
from argos.schemas import WebsiteCheck
|
from argos.schemas import WebsiteCheck
|
||||||
|
from argos.schemas.utils import IPVersion, Method, Todo
|
||||||
|
|
||||||
|
|
||||||
|
def compute_task_group(context) -> str:
|
||||||
|
data = context.current_parameters["request_data"]
|
||||||
|
if data is None:
|
||||||
|
data = ""
|
||||||
|
return (
|
||||||
|
f"{context.current_parameters['method']}-"
|
||||||
|
f"{context.current_parameters['ip_version']}-"
|
||||||
|
f"{context.current_parameters['url']}-"
|
||||||
|
f"{md5(data.encode()).hexdigest()}"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
class Base(DeclarativeBase):
|
class Base(DeclarativeBase):
|
||||||
type_annotation_map = {List[WebsiteCheck]: JSON, dict: JSON}
|
type_annotation_map = {List[WebsiteCheck]: JSON, dict: JSON}
|
||||||
|
|
||||||
|
|
||||||
|
class Job(Base):
|
||||||
|
"""
|
||||||
|
Job queue emulation
|
||||||
|
"""
|
||||||
|
|
||||||
|
__tablename__ = "jobs"
|
||||||
|
id: Mapped[int] = mapped_column(primary_key=True)
|
||||||
|
todo: Mapped[Todo] = mapped_column(Enum("RELOAD_CONFIG", name="todo_enum"))
|
||||||
|
args: Mapped[str] = mapped_column()
|
||||||
|
current: Mapped[bool] = mapped_column(insert_default=False)
|
||||||
|
added_at: Mapped[datetime] = mapped_column()
|
||||||
|
|
||||||
|
|
||||||
class Task(Base):
|
class Task(Base):
|
||||||
"""
|
"""
|
||||||
There is one task per check.
|
There is one task per check.
|
||||||
|
@ -32,15 +60,39 @@ class Task(Base):
|
||||||
# Info needed to run the task
|
# Info needed to run the task
|
||||||
url: Mapped[str] = mapped_column()
|
url: Mapped[str] = mapped_column()
|
||||||
domain: Mapped[str] = mapped_column()
|
domain: Mapped[str] = mapped_column()
|
||||||
|
ip_version: Mapped[IPVersion] = mapped_column(
|
||||||
|
Enum("4", "6", name="ip_version_enum"),
|
||||||
|
)
|
||||||
check: Mapped[str] = mapped_column()
|
check: Mapped[str] = mapped_column()
|
||||||
expected: Mapped[str] = mapped_column()
|
expected: Mapped[str] = mapped_column()
|
||||||
frequency: Mapped[int] = mapped_column()
|
frequency: Mapped[float] = mapped_column()
|
||||||
|
recheck_delay: Mapped[float] = mapped_column(nullable=True)
|
||||||
|
already_retried: Mapped[bool] = mapped_column(insert_default=False)
|
||||||
|
retry_before_notification: Mapped[int] = mapped_column(insert_default=0)
|
||||||
|
contiguous_failures: Mapped[int] = mapped_column(insert_default=0)
|
||||||
|
method: Mapped[Method] = mapped_column(
|
||||||
|
Enum(
|
||||||
|
"GET",
|
||||||
|
"HEAD",
|
||||||
|
"POST",
|
||||||
|
"OPTIONS",
|
||||||
|
"CONNECT",
|
||||||
|
"TRACE",
|
||||||
|
"PUT",
|
||||||
|
"PATCH",
|
||||||
|
"DELETE",
|
||||||
|
name="method",
|
||||||
|
),
|
||||||
|
insert_default="GET",
|
||||||
|
)
|
||||||
|
request_data: Mapped[str] = mapped_column(nullable=True)
|
||||||
|
|
||||||
# Orchestration-related
|
# Orchestration-related
|
||||||
selected_by: Mapped[str] = mapped_column(nullable=True)
|
selected_by: Mapped[str] = mapped_column(nullable=True)
|
||||||
selected_at: Mapped[datetime] = mapped_column(nullable=True)
|
selected_at: Mapped[datetime] = mapped_column(nullable=True)
|
||||||
completed_at: Mapped[datetime] = mapped_column(nullable=True)
|
completed_at: Mapped[datetime] = mapped_column(nullable=True)
|
||||||
next_run: Mapped[datetime] = mapped_column(nullable=True)
|
next_run: Mapped[datetime] = mapped_column(nullable=True)
|
||||||
|
task_group: Mapped[str] = mapped_column(insert_default=compute_task_group)
|
||||||
|
|
||||||
severity: Mapped[Literal["ok", "warning", "critical", "unknown"]] = mapped_column(
|
severity: Mapped[Literal["ok", "warning", "critical", "unknown"]] = mapped_column(
|
||||||
Enum("ok", "warning", "critical", "unknown", name="severity"),
|
Enum("ok", "warning", "critical", "unknown", name="severity"),
|
||||||
|
@ -54,8 +106,8 @@ class Task(Base):
|
||||||
passive_deletes=True,
|
passive_deletes=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self) -> str:
|
||||||
return f"DB Task {self.url} - {self.check} - {self.expected}"
|
return f"DB Task {self.url} (IPv{self.ip_version}) - {self.check} - {self.expected}"
|
||||||
|
|
||||||
def get_check(self) -> BaseCheck:
|
def get_check(self) -> BaseCheck:
|
||||||
"""Returns a check instance for this specific task"""
|
"""Returns a check instance for this specific task"""
|
||||||
|
@ -70,7 +122,16 @@ class Task(Base):
|
||||||
|
|
||||||
now = datetime.now()
|
now = datetime.now()
|
||||||
self.completed_at = now
|
self.completed_at = now
|
||||||
self.next_run = now + timedelta(minutes=self.frequency)
|
if (
|
||||||
|
self.recheck_delay is not None
|
||||||
|
and severity != "ok"
|
||||||
|
and not self.already_retried
|
||||||
|
):
|
||||||
|
self.next_run = now + timedelta(minutes=self.recheck_delay)
|
||||||
|
self.already_retried = True
|
||||||
|
else:
|
||||||
|
self.next_run = now + timedelta(minutes=self.frequency)
|
||||||
|
self.already_retried = False
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def last_result(self):
|
def last_result(self):
|
||||||
|
@ -87,6 +148,9 @@ class Task(Base):
|
||||||
return self.last_result.status
|
return self.last_result.status
|
||||||
|
|
||||||
|
|
||||||
|
Index("similar_tasks", Task.task_group)
|
||||||
|
|
||||||
|
|
||||||
class Result(Base):
|
class Result(Base):
|
||||||
"""There are multiple results per task.
|
"""There are multiple results per task.
|
||||||
|
|
||||||
|
@ -138,3 +202,18 @@ class ConfigCache(Base):
|
||||||
name: Mapped[str] = mapped_column(primary_key=True)
|
name: Mapped[str] = mapped_column(primary_key=True)
|
||||||
val: Mapped[str] = mapped_column()
|
val: Mapped[str] = mapped_column()
|
||||||
updated_at: Mapped[datetime] = mapped_column()
|
updated_at: Mapped[datetime] = mapped_column()
|
||||||
|
|
||||||
|
|
||||||
|
class User(Base):
|
||||||
|
"""Database model for user authentication"""
|
||||||
|
|
||||||
|
__tablename__ = "users"
|
||||||
|
username: Mapped[str] = mapped_column(primary_key=True)
|
||||||
|
password: Mapped[str] = mapped_column()
|
||||||
|
disabled: Mapped[bool] = mapped_column()
|
||||||
|
created_at: Mapped[datetime] = mapped_column(default=datetime.now())
|
||||||
|
updated_at: Mapped[datetime] = mapped_column(nullable=True)
|
||||||
|
last_login_at: Mapped[datetime] = mapped_column(nullable=True)
|
||||||
|
|
||||||
|
def update_last_login_at(self):
|
||||||
|
self.last_login_at = datetime.now()
|
||||||
|
|
|
@ -4,25 +4,27 @@ from hashlib import sha256
|
||||||
from typing import List
|
from typing import List
|
||||||
from urllib.parse import urljoin
|
from urllib.parse import urljoin
|
||||||
|
|
||||||
from sqlalchemy import desc, func
|
from sqlalchemy import asc, func, Select
|
||||||
from sqlalchemy.orm import Session
|
from sqlalchemy.orm import Session
|
||||||
|
|
||||||
from argos import schemas
|
from argos import schemas
|
||||||
from argos.logging import logger
|
from argos.logging import logger
|
||||||
from argos.server.models import Result, Task, ConfigCache
|
from argos.server.models import ConfigCache, Job, Result, Task, User
|
||||||
|
from argos.server.settings import read_config
|
||||||
|
|
||||||
|
|
||||||
async def list_tasks(db: Session, agent_id: str, limit: int = 100):
|
async def list_tasks(db: Session, agent_id: str, limit: int = 100):
|
||||||
"""List tasks and mark them as selected"""
|
"""List tasks and mark them as selected"""
|
||||||
tasks = (
|
subquery = (
|
||||||
db.query(Task)
|
db.query(func.distinct(Task.task_group))
|
||||||
.filter(
|
.filter(
|
||||||
Task.selected_by == None, # noqa: E711
|
Task.selected_by == None, # noqa: E711
|
||||||
((Task.next_run <= datetime.now()) | (Task.next_run == None)), # noqa: E711
|
((Task.next_run <= datetime.now()) | (Task.next_run == None)), # noqa: E711
|
||||||
)
|
)
|
||||||
.limit(limit)
|
.limit(limit)
|
||||||
.all()
|
.subquery()
|
||||||
)
|
)
|
||||||
|
tasks = db.query(Task).filter(Task.task_group.in_(Select(subquery))).all()
|
||||||
|
|
||||||
now = datetime.now()
|
now = datetime.now()
|
||||||
for task in tasks:
|
for task in tasks:
|
||||||
|
@ -32,7 +34,26 @@ async def list_tasks(db: Session, agent_id: str, limit: int = 100):
|
||||||
return tasks
|
return tasks
|
||||||
|
|
||||||
|
|
||||||
async def get_task(db: Session, task_id: int) -> Task:
|
async def add_user(db: Session, name: str, password: str) -> User:
|
||||||
|
user = User(
|
||||||
|
username=name,
|
||||||
|
password=password,
|
||||||
|
disabled=False,
|
||||||
|
)
|
||||||
|
db.add(user)
|
||||||
|
db.commit()
|
||||||
|
return user
|
||||||
|
|
||||||
|
|
||||||
|
async def get_user(db: Session, username: str) -> None | User:
|
||||||
|
return db.get(User, username)
|
||||||
|
|
||||||
|
|
||||||
|
async def list_users(db: Session):
|
||||||
|
return db.query(User).order_by(asc(User.username))
|
||||||
|
|
||||||
|
|
||||||
|
async def get_task(db: Session, task_id: int) -> None | Task:
|
||||||
return db.get(Task, task_id)
|
return db.get(Task, task_id)
|
||||||
|
|
||||||
|
|
||||||
|
@ -48,12 +69,13 @@ async def create_result(db: Session, agent_result: schemas.AgentResult, agent_id
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
async def count_tasks(db: Session, selected=False):
|
async def count_tasks(db: Session, selected: None | bool = None):
|
||||||
query = db.query(Task)
|
query = db.query(Task)
|
||||||
if selected:
|
if selected is not None:
|
||||||
query = query.filter(Task.selected_by is not None)
|
if selected:
|
||||||
else:
|
query = query.filter(Task.selected_by is not None) # type: ignore[arg-type]
|
||||||
query = query.filter(Task.selected_by is None)
|
else:
|
||||||
|
query = query.filter(Task.selected_by is None) # type: ignore[arg-type]
|
||||||
|
|
||||||
return query.count()
|
return query.count()
|
||||||
|
|
||||||
|
@ -62,13 +84,22 @@ async def count_results(db: Session):
|
||||||
return db.query(Result).count()
|
return db.query(Result).count()
|
||||||
|
|
||||||
|
|
||||||
async def has_config_changed(db: Session, config: schemas.Config) -> bool:
|
async def has_config_changed(db: Session, config: schemas.Config) -> bool: # pylint: disable-msg=too-many-statements
|
||||||
"""Check if websites config has changed by using a hashsum and a config cache"""
|
"""Check if websites config has changed by using a hashsum and a config cache"""
|
||||||
websites_hash = sha256(str(config.websites).encode()).hexdigest()
|
websites_hash = sha256(str(config.websites).encode()).hexdigest()
|
||||||
conf_caches = db.query(ConfigCache).all()
|
conf_caches = db.query(ConfigCache).all()
|
||||||
same_config = True
|
same_config = True
|
||||||
|
keys = [
|
||||||
|
"websites_hash",
|
||||||
|
"general_frequency",
|
||||||
|
"general_recheck_delay",
|
||||||
|
"general_retry_before_notification",
|
||||||
|
"general_ipv4",
|
||||||
|
"general_ipv6",
|
||||||
|
]
|
||||||
if conf_caches:
|
if conf_caches:
|
||||||
for conf in conf_caches:
|
for conf in conf_caches:
|
||||||
|
keys.remove(conf.name)
|
||||||
match conf.name:
|
match conf.name:
|
||||||
case "websites_hash":
|
case "websites_hash":
|
||||||
if conf.val != websites_hash:
|
if conf.val != websites_hash:
|
||||||
|
@ -78,11 +109,74 @@ async def has_config_changed(db: Session, config: schemas.Config) -> bool:
|
||||||
case "general_frequency":
|
case "general_frequency":
|
||||||
if conf.val != str(config.general.frequency):
|
if conf.val != str(config.general.frequency):
|
||||||
same_config = False
|
same_config = False
|
||||||
conf.val = config.general.frequency
|
conf.val = str(config.general.frequency)
|
||||||
|
conf.updated_at = datetime.now()
|
||||||
|
case "general_recheck_delay":
|
||||||
|
if conf.val != str(config.general.recheck_delay):
|
||||||
|
same_config = False
|
||||||
|
conf.val = str(config.general.recheck_delay)
|
||||||
|
conf.updated_at = datetime.now()
|
||||||
|
case "general_retry_before_notification":
|
||||||
|
if conf.val != str(config.general.retry_before_notification):
|
||||||
|
same_config = False
|
||||||
|
conf.val = str(config.general.retry_before_notification)
|
||||||
|
conf.updated_at = datetime.now()
|
||||||
|
case "general_ipv4":
|
||||||
|
if conf.val != str(config.general.ipv4):
|
||||||
|
same_config = False
|
||||||
|
conf.val = str(config.general.ipv4)
|
||||||
|
conf.updated_at = datetime.now()
|
||||||
|
case "general_ipv6":
|
||||||
|
if conf.val != str(config.general.ipv6):
|
||||||
|
same_config = False
|
||||||
|
conf.val = str(config.general.ipv6)
|
||||||
conf.updated_at = datetime.now()
|
conf.updated_at = datetime.now()
|
||||||
|
|
||||||
|
for i in keys:
|
||||||
|
match i:
|
||||||
|
case "websites_hash":
|
||||||
|
c = ConfigCache(
|
||||||
|
name="websites_hash",
|
||||||
|
val=websites_hash,
|
||||||
|
updated_at=datetime.now(),
|
||||||
|
)
|
||||||
|
case "general_frequency":
|
||||||
|
c = ConfigCache(
|
||||||
|
name="general_frequency",
|
||||||
|
val=str(config.general.frequency),
|
||||||
|
updated_at=datetime.now(),
|
||||||
|
)
|
||||||
|
case "general_recheck_delay":
|
||||||
|
c = ConfigCache(
|
||||||
|
name="general_recheck_delay",
|
||||||
|
val=str(config.general.recheck_delay),
|
||||||
|
updated_at=datetime.now(),
|
||||||
|
)
|
||||||
|
case "general_retry_before_notification":
|
||||||
|
c = ConfigCache(
|
||||||
|
name="general_retry_before_notification",
|
||||||
|
val=str(config.general.retry_before_notification),
|
||||||
|
updated_at=datetime.now(),
|
||||||
|
)
|
||||||
|
case "general_ipv4":
|
||||||
|
c = ConfigCache(
|
||||||
|
name="general_ipv4",
|
||||||
|
val=str(config.general.ipv4),
|
||||||
|
updated_at=datetime.now(),
|
||||||
|
)
|
||||||
|
case "general_ipv6":
|
||||||
|
c = ConfigCache(
|
||||||
|
name="general_ipv6",
|
||||||
|
val=str(config.general.ipv6),
|
||||||
|
updated_at=datetime.now(),
|
||||||
|
)
|
||||||
|
db.add(c)
|
||||||
|
|
||||||
db.commit()
|
db.commit()
|
||||||
|
|
||||||
|
if keys:
|
||||||
|
return True
|
||||||
|
|
||||||
if same_config:
|
if same_config:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
@ -95,70 +189,182 @@ async def has_config_changed(db: Session, config: schemas.Config) -> bool:
|
||||||
val=str(config.general.frequency),
|
val=str(config.general.frequency),
|
||||||
updated_at=datetime.now(),
|
updated_at=datetime.now(),
|
||||||
)
|
)
|
||||||
|
gen_recheck = ConfigCache(
|
||||||
|
name="general_recheck_delay",
|
||||||
|
val=str(config.general.recheck_delay),
|
||||||
|
updated_at=datetime.now(),
|
||||||
|
)
|
||||||
|
gen_retry_before_notif = ConfigCache(
|
||||||
|
name="general_retry_before_notification",
|
||||||
|
val=str(config.general.retry_before_notification),
|
||||||
|
updated_at=datetime.now(),
|
||||||
|
)
|
||||||
|
gen_ipv4 = ConfigCache(
|
||||||
|
name="general_ipv4",
|
||||||
|
val=str(config.general.ipv4),
|
||||||
|
updated_at=datetime.now(),
|
||||||
|
)
|
||||||
|
gen_ipv6 = ConfigCache(
|
||||||
|
name="general_ipv6",
|
||||||
|
val=str(config.general.ipv6),
|
||||||
|
updated_at=datetime.now(),
|
||||||
|
)
|
||||||
db.add(web_hash)
|
db.add(web_hash)
|
||||||
db.add(gen_freq)
|
db.add(gen_freq)
|
||||||
|
db.add(gen_recheck)
|
||||||
|
db.add(gen_retry_before_notif)
|
||||||
|
db.add(gen_ipv4)
|
||||||
|
db.add(gen_ipv6)
|
||||||
db.commit()
|
db.commit()
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
async def update_from_config(db: Session, config: schemas.Config):
|
async def update_from_config_later(db: Session, config_file):
|
||||||
"""Update tasks from config file"""
|
"""Ask Argos to reload configuration in a recurring task"""
|
||||||
config_changed = await has_config_changed(db, config)
|
jobs = (
|
||||||
if not config_changed:
|
db.query(Job)
|
||||||
return {"added": 0, "vanished": 0}
|
.filter(
|
||||||
|
Job.todo == "RELOAD_CONFIG",
|
||||||
|
Job.args == config_file,
|
||||||
|
Job.current == False,
|
||||||
|
)
|
||||||
|
.all()
|
||||||
|
)
|
||||||
|
if jobs:
|
||||||
|
return "There is already a config reloading job in the job queue, for the same file"
|
||||||
|
|
||||||
|
job = Job(todo="RELOAD_CONFIG", args=config_file, added_at=datetime.now())
|
||||||
|
db.add(job)
|
||||||
|
db.commit()
|
||||||
|
|
||||||
|
return "Config reloading has been added in the job queue"
|
||||||
|
|
||||||
|
|
||||||
|
async def process_jobs(db: Session) -> int:
|
||||||
|
"""Process job queue"""
|
||||||
|
jobs = db.query(Job).filter(Job.current == False).all()
|
||||||
|
if jobs:
|
||||||
|
for job in jobs:
|
||||||
|
job.current = True
|
||||||
|
db.commit()
|
||||||
|
if job.todo == "RELOAD_CONFIG":
|
||||||
|
logger.info("Processing job %i: %s %s", job.id, job.todo, job.args)
|
||||||
|
_config = read_config(job.args)
|
||||||
|
changed = await update_from_config(db, _config)
|
||||||
|
logger.info("%i task(s) added", changed["added"])
|
||||||
|
logger.info("%i task(s) deleted", changed["vanished"])
|
||||||
|
db.delete(job)
|
||||||
|
|
||||||
|
db.commit()
|
||||||
|
return len(jobs)
|
||||||
|
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
|
async def update_from_config(db: Session, config: schemas.Config): # pylint: disable-msg=too-many-branches
|
||||||
|
"""Update tasks from config file"""
|
||||||
max_task_id = (
|
max_task_id = (
|
||||||
db.query(func.max(Task.id).label("max_id")).all() # pylint: disable-msg=not-callable
|
db.query(func.max(Task.id).label("max_id")).all() # pylint: disable-msg=not-callable
|
||||||
)[0].max_id
|
)[0].max_id
|
||||||
tasks = []
|
tasks = []
|
||||||
unique_properties = []
|
unique_properties = []
|
||||||
seen_tasks: List[int] = []
|
seen_tasks: List[int] = []
|
||||||
for website in config.websites:
|
for website in config.websites: # pylint: disable-msg=too-many-nested-blocks
|
||||||
domain = str(website.domain)
|
domain = str(website.domain)
|
||||||
frequency = website.frequency or config.general.frequency
|
frequency = website.frequency or config.general.frequency
|
||||||
|
recheck_delay = website.recheck_delay or config.general.recheck_delay
|
||||||
|
retry_before_notification = (
|
||||||
|
website.retry_before_notification
|
||||||
|
if website.retry_before_notification is not None
|
||||||
|
else config.general.retry_before_notification
|
||||||
|
)
|
||||||
|
ipv4 = website.ipv4 if website.ipv4 is not None else config.general.ipv4
|
||||||
|
ipv6 = website.ipv6 if website.ipv6 is not None else config.general.ipv6
|
||||||
|
if ipv4 is False and ipv6 is False:
|
||||||
|
logger.warning("IPv4 AND IPv6 are disabled on website %s!", domain)
|
||||||
|
continue
|
||||||
|
|
||||||
for p in website.paths:
|
for ip_version in ["4", "6"]:
|
||||||
url = urljoin(domain, str(p.path))
|
for p in website.paths:
|
||||||
for check_key, expected in p.checks:
|
url = urljoin(domain, str(p.path))
|
||||||
# Check the db for already existing tasks.
|
for check_key, expected in p.checks:
|
||||||
existing_tasks = (
|
# Check the db for already existing tasks.
|
||||||
db.query(Task)
|
existing_tasks = (
|
||||||
.filter(
|
db.query(Task)
|
||||||
Task.url == url,
|
.filter(
|
||||||
Task.check == check_key,
|
Task.url == url,
|
||||||
Task.expected == expected,
|
Task.method == p.method,
|
||||||
)
|
Task.request_data == p.request_data,
|
||||||
.all()
|
Task.check == check_key,
|
||||||
)
|
Task.expected == expected,
|
||||||
if existing_tasks:
|
Task.ip_version == ip_version,
|
||||||
existing_task = existing_tasks[0]
|
|
||||||
seen_tasks.append(existing_task.id)
|
|
||||||
|
|
||||||
if frequency != existing_task.frequency:
|
|
||||||
existing_task.frequency = frequency
|
|
||||||
logger.debug(
|
|
||||||
"Skipping db task creation for url=%s, "
|
|
||||||
"check_key=%s, expected=%s, frequency=%s.",
|
|
||||||
url,
|
|
||||||
check_key,
|
|
||||||
expected,
|
|
||||||
frequency,
|
|
||||||
)
|
|
||||||
|
|
||||||
else:
|
|
||||||
properties = (url, check_key, expected)
|
|
||||||
if properties not in unique_properties:
|
|
||||||
unique_properties.append(properties)
|
|
||||||
task = Task(
|
|
||||||
domain=domain,
|
|
||||||
url=url,
|
|
||||||
check=check_key,
|
|
||||||
expected=expected,
|
|
||||||
frequency=frequency,
|
|
||||||
)
|
)
|
||||||
logger.debug("Adding a new task in the db: %s", task)
|
.all()
|
||||||
tasks.append(task)
|
)
|
||||||
|
|
||||||
|
if (ip_version == "4" and ipv4 is False) or (
|
||||||
|
ip_version == "6" and ipv6 is False
|
||||||
|
):
|
||||||
|
continue
|
||||||
|
|
||||||
|
if existing_tasks:
|
||||||
|
existing_task = existing_tasks[0]
|
||||||
|
|
||||||
|
seen_tasks.append(existing_task.id)
|
||||||
|
|
||||||
|
if frequency != existing_task.frequency:
|
||||||
|
existing_task.frequency = frequency
|
||||||
|
if recheck_delay != existing_task.recheck_delay:
|
||||||
|
existing_task.recheck_delay = recheck_delay # type: ignore[assignment]
|
||||||
|
if (
|
||||||
|
retry_before_notification
|
||||||
|
!= existing_task.retry_before_notification
|
||||||
|
):
|
||||||
|
existing_task.retry_before_notification = (
|
||||||
|
retry_before_notification
|
||||||
|
)
|
||||||
|
logger.debug(
|
||||||
|
"Skipping db task creation for url=%s, "
|
||||||
|
"method=%s, check_key=%s, expected=%s, "
|
||||||
|
"frequency=%s, recheck_delay=%s, "
|
||||||
|
"retry_before_notification=%s, ip_version=%s.",
|
||||||
|
url,
|
||||||
|
p.method,
|
||||||
|
check_key,
|
||||||
|
expected,
|
||||||
|
frequency,
|
||||||
|
recheck_delay,
|
||||||
|
retry_before_notification,
|
||||||
|
ip_version,
|
||||||
|
)
|
||||||
|
|
||||||
|
else:
|
||||||
|
properties = (
|
||||||
|
url,
|
||||||
|
p.method,
|
||||||
|
check_key,
|
||||||
|
expected,
|
||||||
|
ip_version,
|
||||||
|
p.request_data,
|
||||||
|
)
|
||||||
|
if properties not in unique_properties:
|
||||||
|
unique_properties.append(properties)
|
||||||
|
task = Task(
|
||||||
|
domain=domain,
|
||||||
|
url=url,
|
||||||
|
ip_version=ip_version,
|
||||||
|
method=p.method,
|
||||||
|
request_data=p.request_data,
|
||||||
|
check=check_key,
|
||||||
|
expected=expected,
|
||||||
|
frequency=frequency,
|
||||||
|
recheck_delay=recheck_delay,
|
||||||
|
retry_before_notification=retry_before_notification,
|
||||||
|
already_retried=False,
|
||||||
|
)
|
||||||
|
logger.debug("Adding a new task in the db: %s", task)
|
||||||
|
tasks.append(task)
|
||||||
|
|
||||||
db.add_all(tasks)
|
db.add_all(tasks)
|
||||||
db.commit()
|
db.commit()
|
||||||
|
@ -172,7 +378,8 @@ async def update_from_config(db: Session, config: schemas.Config):
|
||||||
)
|
)
|
||||||
db.commit()
|
db.commit()
|
||||||
logger.info(
|
logger.info(
|
||||||
"%i tasks has been removed since not in config file anymore", vanished_tasks
|
"%i task(s) has been removed since not in config file anymore",
|
||||||
|
vanished_tasks,
|
||||||
)
|
)
|
||||||
return {"added": len(tasks), "vanished": vanished_tasks}
|
return {"added": len(tasks), "vanished": vanished_tasks}
|
||||||
|
|
||||||
|
@ -188,7 +395,7 @@ async def get_severity_counts(db: Session) -> dict:
|
||||||
# Execute the query and fetch the results
|
# Execute the query and fetch the results
|
||||||
task_counts_by_severity = query.all()
|
task_counts_by_severity = query.all()
|
||||||
|
|
||||||
counts_dict = dict(task_counts_by_severity)
|
counts_dict = dict(task_counts_by_severity) # type: ignore[var-annotated,arg-type]
|
||||||
for key in ("ok", "warning", "critical", "unknown"):
|
for key in ("ok", "warning", "critical", "unknown"):
|
||||||
counts_dict.setdefault(key, 0)
|
counts_dict.setdefault(key, 0)
|
||||||
return counts_dict
|
return counts_dict
|
||||||
|
@ -202,28 +409,13 @@ async def reschedule_all(db: Session):
|
||||||
db.commit()
|
db.commit()
|
||||||
|
|
||||||
|
|
||||||
async def remove_old_results(db: Session, max_results: int):
|
async def remove_old_results(db: Session, max_results_age: float):
|
||||||
tasks = db.query(Task).all()
|
"""Remove old results, base on age"""
|
||||||
deleted = 0
|
max_acceptable_time = datetime.now() - timedelta(seconds=max_results_age)
|
||||||
for task in tasks:
|
deleted = (
|
||||||
# Get the id of the oldest result to keep
|
db.query(Result).filter(Result.submitted_at < max_acceptable_time).delete()
|
||||||
subquery = (
|
)
|
||||||
db.query(Result.id)
|
db.commit()
|
||||||
.filter(Result.task_id == task.id)
|
|
||||||
.order_by(desc(Result.id))
|
|
||||||
.limit(max_results)
|
|
||||||
.subquery()
|
|
||||||
)
|
|
||||||
min_id = db.query(func.min(subquery.c.id)).scalar() # pylint: disable-msg=not-callable
|
|
||||||
|
|
||||||
# Delete all the results older than min_id
|
|
||||||
if min_id:
|
|
||||||
deleted += (
|
|
||||||
db.query(Result)
|
|
||||||
.where(Result.id < min_id, Result.task_id == task.id)
|
|
||||||
.delete()
|
|
||||||
)
|
|
||||||
db.commit()
|
|
||||||
|
|
||||||
return deleted
|
return deleted
|
||||||
|
|
||||||
|
@ -240,3 +432,11 @@ async def release_old_locks(db: Session, max_lock_seconds: int):
|
||||||
)
|
)
|
||||||
db.commit()
|
db.commit()
|
||||||
return updated
|
return updated
|
||||||
|
|
||||||
|
|
||||||
|
async def get_recent_agents_count(db: Session, minutes: int):
|
||||||
|
"""Get agents seen less than <minutes> ago"""
|
||||||
|
max_time = datetime.now() - timedelta(minutes=minutes)
|
||||||
|
|
||||||
|
agents = db.query(Result.agent_id).filter(Result.submitted_at > max_time).distinct()
|
||||||
|
return agents.count()
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
"""Web interface for machines"""
|
"""Web interface for machines"""
|
||||||
from typing import List, Union
|
from typing import List
|
||||||
|
|
||||||
from fastapi import APIRouter, BackgroundTasks, Depends, Request
|
from fastapi import APIRouter, BackgroundTasks, Depends, Request
|
||||||
from sqlalchemy.orm import Session
|
from sqlalchemy.orm import Session
|
||||||
|
@ -7,7 +7,7 @@ from sqlalchemy.orm import Session
|
||||||
from argos.logging import logger
|
from argos.logging import logger
|
||||||
from argos.schemas import AgentResult, Config, Task
|
from argos.schemas import AgentResult, Config, Task
|
||||||
from argos.server import queries
|
from argos.server import queries
|
||||||
from argos.server.alerting import handle_alert
|
from argos.server.alerting import handle_alert, need_alert
|
||||||
from argos.server.routes.dependencies import get_config, get_db, verify_token
|
from argos.server.routes.dependencies import get_config, get_db, verify_token
|
||||||
|
|
||||||
route = APIRouter()
|
route = APIRouter()
|
||||||
|
@ -18,22 +18,25 @@ async def read_tasks(
|
||||||
request: Request,
|
request: Request,
|
||||||
db: Session = Depends(get_db),
|
db: Session = Depends(get_db),
|
||||||
limit: int = 10,
|
limit: int = 10,
|
||||||
agent_id: Union[None, str] = None,
|
agent_id: None | str = None,
|
||||||
):
|
):
|
||||||
"""Return a list of tasks to execute"""
|
"""Return a list of tasks to execute"""
|
||||||
agent_id = agent_id or request.client.host
|
host = ""
|
||||||
|
if request.client is not None:
|
||||||
|
host = request.client.host
|
||||||
|
agent_id = agent_id or host
|
||||||
tasks = await queries.list_tasks(db, agent_id=agent_id, limit=limit)
|
tasks = await queries.list_tasks(db, agent_id=agent_id, limit=limit)
|
||||||
return tasks
|
return tasks
|
||||||
|
|
||||||
|
|
||||||
@route.post("/results", status_code=201, dependencies=[Depends(verify_token)])
|
@route.post("/results", status_code=201, dependencies=[Depends(verify_token)])
|
||||||
async def create_results(
|
async def create_results( # pylint: disable-msg=too-many-positional-arguments
|
||||||
request: Request,
|
request: Request,
|
||||||
results: List[AgentResult],
|
results: List[AgentResult],
|
||||||
background_tasks: BackgroundTasks,
|
background_tasks: BackgroundTasks,
|
||||||
db: Session = Depends(get_db),
|
db: Session = Depends(get_db),
|
||||||
config: Config = Depends(get_config),
|
config: Config = Depends(get_config),
|
||||||
agent_id: Union[None, str] = None,
|
agent_id: None | str = None,
|
||||||
):
|
):
|
||||||
"""Get the results from the agents and store them locally.
|
"""Get the results from the agents and store them locally.
|
||||||
|
|
||||||
|
@ -42,7 +45,10 @@ async def create_results(
|
||||||
- If it's an error, determine its severity ;
|
- If it's an error, determine its severity ;
|
||||||
- Trigger the reporting calls
|
- Trigger the reporting calls
|
||||||
"""
|
"""
|
||||||
agent_id = agent_id or request.client.host
|
host = ""
|
||||||
|
if request.client is not None:
|
||||||
|
host = request.client.host
|
||||||
|
agent_id = agent_id or host
|
||||||
db_results = []
|
db_results = []
|
||||||
for agent_result in results:
|
for agent_result in results:
|
||||||
# XXX Maybe offload this to a queue.
|
# XXX Maybe offload this to a queue.
|
||||||
|
@ -52,16 +58,26 @@ async def create_results(
|
||||||
logger.error("Unable to find task %i", agent_result.task_id)
|
logger.error("Unable to find task %i", agent_result.task_id)
|
||||||
else:
|
else:
|
||||||
last_severity = task.severity
|
last_severity = task.severity
|
||||||
|
last_severity_update = task.last_severity_update
|
||||||
result = await queries.create_result(db, agent_result, agent_id)
|
result = await queries.create_result(db, agent_result, agent_id)
|
||||||
check = task.get_check()
|
check = task.get_check()
|
||||||
status, severity = await check.finalize(config, result, **result.context)
|
status, severity = await check.finalize(config, result, **result.context)
|
||||||
result.set_status(status, severity)
|
result.set_status(status, severity)
|
||||||
task.set_times_severity_and_deselect(severity, result.submitted_at)
|
task.set_times_severity_and_deselect(severity, result.submitted_at)
|
||||||
|
|
||||||
# Don’t create an alert if the severity has not changed
|
send_notif = need_alert(
|
||||||
if last_severity != severity:
|
last_severity, last_severity_update, severity, status, task
|
||||||
|
)
|
||||||
|
|
||||||
|
if send_notif:
|
||||||
background_tasks.add_task(
|
background_tasks.add_task(
|
||||||
handle_alert, config, result, task, severity, last_severity, request
|
handle_alert,
|
||||||
|
config,
|
||||||
|
result,
|
||||||
|
task,
|
||||||
|
severity,
|
||||||
|
last_severity,
|
||||||
|
request,
|
||||||
)
|
)
|
||||||
|
|
||||||
db_results.append(result)
|
db_results.append(result)
|
||||||
|
|
|
@ -1,5 +1,8 @@
|
||||||
from fastapi import Depends, HTTPException, Request
|
from fastapi import Depends, HTTPException, Request
|
||||||
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
|
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
|
||||||
|
from fastapi_login import LoginManager
|
||||||
|
|
||||||
|
from argos.logging import logger
|
||||||
|
|
||||||
auth_scheme = HTTPBearer()
|
auth_scheme = HTTPBearer()
|
||||||
|
|
||||||
|
@ -16,6 +19,13 @@ def get_config(request: Request):
|
||||||
return request.app.state.config
|
return request.app.state.config
|
||||||
|
|
||||||
|
|
||||||
|
async def get_manager(request: Request) -> LoginManager:
|
||||||
|
if request.app.state.config.general.unauthenticated_access is not None:
|
||||||
|
return await request.app.state.manager.optional(request)
|
||||||
|
|
||||||
|
return await request.app.state.manager(request)
|
||||||
|
|
||||||
|
|
||||||
async def verify_token(
|
async def verify_token(
|
||||||
request: Request, token: HTTPAuthorizationCredentials = Depends(auth_scheme)
|
request: Request, token: HTTPAuthorizationCredentials = Depends(auth_scheme)
|
||||||
):
|
):
|
||||||
|
@ -23,3 +33,35 @@ async def verify_token(
|
||||||
if token.credentials not in request.app.state.config.service.secrets:
|
if token.credentials not in request.app.state.config.service.secrets:
|
||||||
raise HTTPException(status_code=401, detail="Unauthorized")
|
raise HTTPException(status_code=401, detail="Unauthorized")
|
||||||
return token
|
return token
|
||||||
|
|
||||||
|
|
||||||
|
async def find_ldap_user(config, ldapobj, user: str) -> str | None:
|
||||||
|
"""Do a LDAP search for user and return its dn"""
|
||||||
|
import ldap
|
||||||
|
import ldap.filter as ldap_filter
|
||||||
|
from ldapurl import LDAP_SCOPE_SUBTREE
|
||||||
|
|
||||||
|
try:
|
||||||
|
ldapobj.simple_bind_s(config.general.ldap.bind_dn, config.general.ldap.bind_pwd)
|
||||||
|
except ldap.LDAPError as err: # pylint: disable-msg=no-member
|
||||||
|
logger.error("LDAP error: %s", err)
|
||||||
|
return None
|
||||||
|
|
||||||
|
result = ldapobj.search_s(
|
||||||
|
config.general.ldap.user_tree,
|
||||||
|
LDAP_SCOPE_SUBTREE,
|
||||||
|
filterstr=ldap_filter.filter_format(
|
||||||
|
f"(&(%s=%s){config.general.ldap.user_filter})",
|
||||||
|
[
|
||||||
|
config.general.ldap.user_attr,
|
||||||
|
user,
|
||||||
|
],
|
||||||
|
),
|
||||||
|
attrlist=[config.general.ldap.user_attr],
|
||||||
|
)
|
||||||
|
|
||||||
|
# If there is a result, there should, logically, be only one entry
|
||||||
|
if len(result) > 0:
|
||||||
|
return result[0][0]
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
|
@ -1,31 +1,167 @@
|
||||||
"""Web interface for humans"""
|
"""Web interface for humans"""
|
||||||
from collections import defaultdict
|
from collections import defaultdict
|
||||||
|
from datetime import datetime, timedelta
|
||||||
from functools import cmp_to_key
|
from functools import cmp_to_key
|
||||||
from os import path
|
from pathlib import Path
|
||||||
from typing import Annotated
|
from typing import Annotated
|
||||||
from urllib.parse import urlparse
|
from urllib.parse import urlparse
|
||||||
|
|
||||||
from fastapi import APIRouter, Cookie, Depends, Form, Request, status
|
from fastapi import APIRouter, Cookie, Depends, Form, Request, status
|
||||||
from fastapi.responses import RedirectResponse
|
from fastapi.responses import RedirectResponse
|
||||||
|
from fastapi.security import OAuth2PasswordRequestForm
|
||||||
from fastapi.templating import Jinja2Templates
|
from fastapi.templating import Jinja2Templates
|
||||||
|
from passlib.context import CryptContext
|
||||||
from sqlalchemy import func
|
from sqlalchemy import func
|
||||||
from sqlalchemy.orm import Session
|
from sqlalchemy.orm import Session
|
||||||
|
|
||||||
|
from argos.checks.base import Status
|
||||||
from argos.schemas import Config
|
from argos.schemas import Config
|
||||||
from argos.server import queries
|
from argos.server import queries
|
||||||
from argos.server.models import Result, Task
|
from argos.server.exceptions import NotAuthenticatedException
|
||||||
from argos.server.routes.dependencies import get_config, get_db
|
from argos.server.models import Result, Task, User
|
||||||
|
from argos.server.routes.dependencies import get_config, get_db, get_manager
|
||||||
|
|
||||||
route = APIRouter()
|
route = APIRouter()
|
||||||
|
|
||||||
current_dir = path.dirname(__file__)
|
current_dir = Path(__file__).resolve().parent
|
||||||
templates = Jinja2Templates(directory=path.join(current_dir, "../templates"))
|
templates = Jinja2Templates(directory=current_dir / ".." / "templates")
|
||||||
SEVERITY_LEVELS = {"ok": 1, "warning": 2, "critical": 3, "unknown": 4}
|
SEVERITY_LEVELS = {"ok": 1, "warning": 2, "critical": 3, "unknown": 4}
|
||||||
|
|
||||||
|
|
||||||
|
@route.get("/login")
|
||||||
|
async def login_view(
|
||||||
|
request: Request,
|
||||||
|
msg: str | None = None,
|
||||||
|
config: Config = Depends(get_config),
|
||||||
|
):
|
||||||
|
if config.general.unauthenticated_access == "all":
|
||||||
|
return RedirectResponse(
|
||||||
|
request.url_for("get_severity_counts_view"),
|
||||||
|
status_code=status.HTTP_303_SEE_OTHER,
|
||||||
|
)
|
||||||
|
|
||||||
|
token = request.cookies.get("access-token")
|
||||||
|
if token is not None and token != "":
|
||||||
|
manager = request.app.state.manager
|
||||||
|
user = await manager.get_current_user(token)
|
||||||
|
if user is not None:
|
||||||
|
return RedirectResponse(
|
||||||
|
request.url_for("get_severity_counts_view"),
|
||||||
|
status_code=status.HTTP_303_SEE_OTHER,
|
||||||
|
)
|
||||||
|
|
||||||
|
if msg == "logout":
|
||||||
|
msg = "You have been successfully disconnected."
|
||||||
|
else:
|
||||||
|
msg = None
|
||||||
|
|
||||||
|
return templates.TemplateResponse(
|
||||||
|
"login.html",
|
||||||
|
{
|
||||||
|
"request": request,
|
||||||
|
"msg": msg,
|
||||||
|
"remember": config.general.remember_me_duration,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@route.post("/login")
|
||||||
|
async def post_login(
|
||||||
|
request: Request,
|
||||||
|
db: Session = Depends(get_db),
|
||||||
|
data: OAuth2PasswordRequestForm = Depends(),
|
||||||
|
rememberme: Annotated[str | None, Form()] = None,
|
||||||
|
config: Config = Depends(get_config),
|
||||||
|
):
|
||||||
|
if config.general.unauthenticated_access == "all":
|
||||||
|
return RedirectResponse(
|
||||||
|
request.url_for("get_severity_counts_view"),
|
||||||
|
status_code=status.HTTP_303_SEE_OTHER,
|
||||||
|
)
|
||||||
|
|
||||||
|
username = data.username
|
||||||
|
|
||||||
|
invalid_credentials = templates.TemplateResponse(
|
||||||
|
"login.html",
|
||||||
|
{"request": request, "msg": "Sorry, invalid username or bad password."},
|
||||||
|
)
|
||||||
|
|
||||||
|
if config.general.ldap is not None:
|
||||||
|
from ldap import INVALID_CREDENTIALS # pylint: disable-msg=no-name-in-module
|
||||||
|
from argos.server.routes.dependencies import find_ldap_user
|
||||||
|
|
||||||
|
invalid_credentials = templates.TemplateResponse(
|
||||||
|
"login.html",
|
||||||
|
{
|
||||||
|
"request": request,
|
||||||
|
"msg": "Sorry, invalid username or bad password. "
|
||||||
|
"Or the LDAP server is unreachable (see logs to verify).",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
|
||||||
|
ldap_dn = await find_ldap_user(config, request.app.state.ldap, username)
|
||||||
|
if ldap_dn is None:
|
||||||
|
return invalid_credentials
|
||||||
|
try:
|
||||||
|
request.app.state.ldap.simple_bind_s(ldap_dn, data.password)
|
||||||
|
except INVALID_CREDENTIALS:
|
||||||
|
return invalid_credentials
|
||||||
|
else:
|
||||||
|
user = await queries.get_user(db, username)
|
||||||
|
if user is None:
|
||||||
|
return invalid_credentials
|
||||||
|
|
||||||
|
pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
|
||||||
|
if not pwd_context.verify(data.password, user.password):
|
||||||
|
return invalid_credentials
|
||||||
|
|
||||||
|
user.last_login_at = datetime.now()
|
||||||
|
db.commit()
|
||||||
|
|
||||||
|
manager = request.app.state.manager
|
||||||
|
session_duration = config.general.session_duration
|
||||||
|
if config.general.remember_me_duration is not None and rememberme == "on":
|
||||||
|
session_duration = config.general.remember_me_duration
|
||||||
|
delta = timedelta(minutes=session_duration)
|
||||||
|
token = manager.create_access_token(data={"sub": username}, expires=delta)
|
||||||
|
response = RedirectResponse(
|
||||||
|
request.url_for("get_severity_counts_view"),
|
||||||
|
status_code=status.HTTP_303_SEE_OTHER,
|
||||||
|
)
|
||||||
|
response.set_cookie(
|
||||||
|
key=manager.cookie_name,
|
||||||
|
value=token,
|
||||||
|
httponly=True,
|
||||||
|
samesite="strict",
|
||||||
|
expires=int(delta.total_seconds()),
|
||||||
|
)
|
||||||
|
return response
|
||||||
|
|
||||||
|
|
||||||
|
@route.get("/logout")
|
||||||
|
async def logout_view(
|
||||||
|
request: Request,
|
||||||
|
config: Config = Depends(get_config),
|
||||||
|
user: User | None = Depends(get_manager),
|
||||||
|
):
|
||||||
|
if config.general.unauthenticated_access == "all":
|
||||||
|
return RedirectResponse(
|
||||||
|
request.url_for("get_severity_counts_view"),
|
||||||
|
status_code=status.HTTP_303_SEE_OTHER,
|
||||||
|
)
|
||||||
|
|
||||||
|
response = RedirectResponse(
|
||||||
|
request.url_for("login_view").include_query_params(msg="logout"),
|
||||||
|
status_code=status.HTTP_303_SEE_OTHER,
|
||||||
|
)
|
||||||
|
response.delete_cookie(key="access-token")
|
||||||
|
return response
|
||||||
|
|
||||||
|
|
||||||
@route.get("/")
|
@route.get("/")
|
||||||
async def get_severity_counts_view(
|
async def get_severity_counts_view(
|
||||||
request: Request,
|
request: Request,
|
||||||
|
user: User | None = Depends(get_manager),
|
||||||
db: Session = Depends(get_db),
|
db: Session = Depends(get_db),
|
||||||
auto_refresh_enabled: Annotated[bool, Cookie()] = False,
|
auto_refresh_enabled: Annotated[bool, Cookie()] = False,
|
||||||
auto_refresh_seconds: Annotated[int, Cookie()] = 15,
|
auto_refresh_seconds: Annotated[int, Cookie()] = 15,
|
||||||
|
@ -43,17 +179,27 @@ async def get_severity_counts_view(
|
||||||
"agents": agents,
|
"agents": agents,
|
||||||
"auto_refresh_enabled": auto_refresh_enabled,
|
"auto_refresh_enabled": auto_refresh_enabled,
|
||||||
"auto_refresh_seconds": auto_refresh_seconds,
|
"auto_refresh_seconds": auto_refresh_seconds,
|
||||||
|
"user": user,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@route.get("/domains")
|
@route.get("/domains")
|
||||||
async def get_domains_view(request: Request, db: Session = Depends(get_db)):
|
async def get_domains_view(
|
||||||
|
request: Request,
|
||||||
|
user: User | None = Depends(get_manager),
|
||||||
|
config: Config = Depends(get_config),
|
||||||
|
db: Session = Depends(get_db),
|
||||||
|
):
|
||||||
"""Show all tasks and their current state"""
|
"""Show all tasks and their current state"""
|
||||||
|
if config.general.unauthenticated_access == "dashboard":
|
||||||
|
if user is None:
|
||||||
|
raise NotAuthenticatedException
|
||||||
|
|
||||||
tasks = db.query(Task).all()
|
tasks = db.query(Task).all()
|
||||||
|
|
||||||
domains_severities = defaultdict(list)
|
domains_severities = defaultdict(list)
|
||||||
domains_last_checks = defaultdict(list)
|
domains_last_checks = defaultdict(list) # type: ignore[var-annotated]
|
||||||
|
|
||||||
for task in tasks:
|
for task in tasks:
|
||||||
domain = urlparse(task.url).netloc
|
domain = urlparse(task.url).netloc
|
||||||
|
@ -90,29 +236,58 @@ async def get_domains_view(request: Request, db: Session = Depends(get_db)):
|
||||||
"last_checks": domains_last_checks,
|
"last_checks": domains_last_checks,
|
||||||
"total_task_count": len(tasks),
|
"total_task_count": len(tasks),
|
||||||
"agents": agents,
|
"agents": agents,
|
||||||
|
"user": user,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@route.get("/domain/{domain}")
|
@route.get("/domain/{domain}")
|
||||||
async def get_domain_tasks_view(
|
async def get_domain_tasks_view(
|
||||||
request: Request, domain: str, db: Session = Depends(get_db)
|
request: Request,
|
||||||
|
domain: str,
|
||||||
|
user: User | None = Depends(get_manager),
|
||||||
|
config: Config = Depends(get_config),
|
||||||
|
db: Session = Depends(get_db),
|
||||||
):
|
):
|
||||||
"""Show all tasks attached to a domain"""
|
"""Show all tasks attached to a domain"""
|
||||||
|
if config.general.unauthenticated_access == "dashboard":
|
||||||
|
if user is None:
|
||||||
|
raise NotAuthenticatedException
|
||||||
|
|
||||||
tasks = db.query(Task).filter(Task.domain.contains(f"//{domain}")).all()
|
tasks = db.query(Task).filter(Task.domain.contains(f"//{domain}")).all()
|
||||||
return templates.TemplateResponse(
|
return templates.TemplateResponse(
|
||||||
"domain.html", {"request": request, "domain": domain, "tasks": tasks}
|
"domain.html",
|
||||||
|
{
|
||||||
|
"request": request,
|
||||||
|
"domain": domain,
|
||||||
|
"tasks": tasks,
|
||||||
|
"user": user,
|
||||||
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@route.get("/result/{result_id}")
|
@route.get("/result/{result_id}")
|
||||||
async def get_result_view(
|
async def get_result_view(
|
||||||
request: Request, result_id: int, db: Session = Depends(get_db)
|
request: Request,
|
||||||
|
result_id: int,
|
||||||
|
user: User | None = Depends(get_manager),
|
||||||
|
config: Config = Depends(get_config),
|
||||||
|
db: Session = Depends(get_db),
|
||||||
):
|
):
|
||||||
"""Show the details of a result"""
|
"""Show the details of a result"""
|
||||||
|
if config.general.unauthenticated_access == "dashboard":
|
||||||
|
if user is None:
|
||||||
|
raise NotAuthenticatedException
|
||||||
|
|
||||||
result = db.query(Result).get(result_id)
|
result = db.query(Result).get(result_id)
|
||||||
return templates.TemplateResponse(
|
return templates.TemplateResponse(
|
||||||
"result.html", {"request": request, "result": result}
|
"result.html",
|
||||||
|
{
|
||||||
|
"request": request,
|
||||||
|
"result": result,
|
||||||
|
"error": Status.ERROR,
|
||||||
|
"user": user,
|
||||||
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@ -120,10 +295,15 @@ async def get_result_view(
|
||||||
async def get_task_results_view(
|
async def get_task_results_view(
|
||||||
request: Request,
|
request: Request,
|
||||||
task_id: int,
|
task_id: int,
|
||||||
|
user: User | None = Depends(get_manager),
|
||||||
db: Session = Depends(get_db),
|
db: Session = Depends(get_db),
|
||||||
config: Config = Depends(get_config),
|
config: Config = Depends(get_config),
|
||||||
):
|
):
|
||||||
"""Show history of a task’s results"""
|
"""Show history of a task’s results"""
|
||||||
|
if config.general.unauthenticated_access == "dashboard":
|
||||||
|
if user is None:
|
||||||
|
raise NotAuthenticatedException
|
||||||
|
|
||||||
results = (
|
results = (
|
||||||
db.query(Result)
|
db.query(Result)
|
||||||
.filter(Result.task_id == task_id)
|
.filter(Result.task_id == task_id)
|
||||||
|
@ -131,7 +311,9 @@ async def get_task_results_view(
|
||||||
.all()
|
.all()
|
||||||
)
|
)
|
||||||
task = db.query(Task).get(task_id)
|
task = db.query(Task).get(task_id)
|
||||||
description = task.get_check().get_description(config)
|
description = ""
|
||||||
|
if task is not None:
|
||||||
|
description = task.get_check().get_description(config)
|
||||||
return templates.TemplateResponse(
|
return templates.TemplateResponse(
|
||||||
"results.html",
|
"results.html",
|
||||||
{
|
{
|
||||||
|
@ -139,13 +321,24 @@ async def get_task_results_view(
|
||||||
"results": results,
|
"results": results,
|
||||||
"task": task,
|
"task": task,
|
||||||
"description": description,
|
"description": description,
|
||||||
|
"error": Status.ERROR,
|
||||||
|
"user": user,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@route.get("/agents")
|
@route.get("/agents")
|
||||||
async def get_agents_view(request: Request, db: Session = Depends(get_db)):
|
async def get_agents_view(
|
||||||
|
request: Request,
|
||||||
|
user: User | None = Depends(get_manager),
|
||||||
|
config: Config = Depends(get_config),
|
||||||
|
db: Session = Depends(get_db),
|
||||||
|
):
|
||||||
"""Show argos agents and the last time the server saw them"""
|
"""Show argos agents and the last time the server saw them"""
|
||||||
|
if config.general.unauthenticated_access == "dashboard":
|
||||||
|
if user is None:
|
||||||
|
raise NotAuthenticatedException
|
||||||
|
|
||||||
last_seen = (
|
last_seen = (
|
||||||
db.query(Result.agent_id, func.max(Result.submitted_at).label("submitted_at"))
|
db.query(Result.agent_id, func.max(Result.submitted_at).label("submitted_at"))
|
||||||
.group_by(Result.agent_id)
|
.group_by(Result.agent_id)
|
||||||
|
@ -153,13 +346,19 @@ async def get_agents_view(request: Request, db: Session = Depends(get_db)):
|
||||||
)
|
)
|
||||||
|
|
||||||
return templates.TemplateResponse(
|
return templates.TemplateResponse(
|
||||||
"agents.html", {"request": request, "last_seen": last_seen}
|
"agents.html",
|
||||||
|
{
|
||||||
|
"request": request,
|
||||||
|
"last_seen": last_seen,
|
||||||
|
"user": user,
|
||||||
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@route.post("/refresh")
|
@route.post("/refresh")
|
||||||
async def set_refresh_cookies_view(
|
async def set_refresh_cookies_view(
|
||||||
request: Request,
|
request: Request,
|
||||||
|
user: User | None = Depends(get_manager),
|
||||||
auto_refresh_enabled: Annotated[bool, Form()] = False,
|
auto_refresh_enabled: Annotated[bool, Form()] = False,
|
||||||
auto_refresh_seconds: Annotated[int, Form()] = 15,
|
auto_refresh_seconds: Annotated[int, Form()] = 15,
|
||||||
):
|
):
|
||||||
|
@ -167,6 +366,21 @@ async def set_refresh_cookies_view(
|
||||||
request.url_for("get_severity_counts_view"),
|
request.url_for("get_severity_counts_view"),
|
||||||
status_code=status.HTTP_303_SEE_OTHER,
|
status_code=status.HTTP_303_SEE_OTHER,
|
||||||
)
|
)
|
||||||
response.set_cookie(key="auto_refresh_enabled", value=auto_refresh_enabled)
|
# Cookies’ age in Chrome can’t be more than 400 days
|
||||||
response.set_cookie(key="auto_refresh_seconds", value=int(auto_refresh_seconds))
|
# https://developer.chrome.com/blog/cookie-max-age-expires
|
||||||
|
delta = int(timedelta(days=400).total_seconds())
|
||||||
|
response.set_cookie(
|
||||||
|
key="auto_refresh_enabled",
|
||||||
|
value=str(auto_refresh_enabled),
|
||||||
|
httponly=True,
|
||||||
|
samesite="strict",
|
||||||
|
expires=delta,
|
||||||
|
)
|
||||||
|
response.set_cookie(
|
||||||
|
key="auto_refresh_seconds",
|
||||||
|
value=str(max(5, int(auto_refresh_seconds))),
|
||||||
|
httponly=True,
|
||||||
|
samesite="strict",
|
||||||
|
expires=delta,
|
||||||
|
)
|
||||||
return response
|
return response
|
||||||
|
|
|
@ -1,85 +1,35 @@
|
||||||
"""Pydantic schemas for server"""
|
"""Pydantic schemas for server"""
|
||||||
import os
|
import sys
|
||||||
from functools import lru_cache
|
from pathlib import Path
|
||||||
from os import environ
|
|
||||||
from typing import Optional, Union
|
|
||||||
|
|
||||||
import yaml
|
import yaml
|
||||||
from pydantic_settings import BaseSettings, SettingsConfigDict
|
|
||||||
from yamlinclude import YamlIncludeConstructor
|
from yamlinclude import YamlIncludeConstructor
|
||||||
|
from pydantic import ValidationError
|
||||||
|
|
||||||
|
from argos.logging import logger
|
||||||
from argos.schemas.config import Config
|
from argos.schemas.config import Config
|
||||||
|
|
||||||
|
|
||||||
class Settings(BaseSettings):
|
def read_config(yaml_file):
|
||||||
model_config = SettingsConfigDict(env_prefix="argos_", env_file=".env")
|
try:
|
||||||
app_env: str
|
config = read_yaml_config(yaml_file)
|
||||||
database_url: str
|
return config
|
||||||
yaml_file: str
|
except ValidationError as err:
|
||||||
db_pool_size: Optional[int]
|
logger.error("Errors where found while reading configuration:")
|
||||||
db_max_overflow: Optional[int]
|
for error in err.errors():
|
||||||
|
logger.error("%s is %s", error["loc"], error["type"])
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
|
||||||
class DevSettings(Settings):
|
def read_yaml_config(filename: str) -> Config:
|
||||||
"""Settings for dev environment.
|
|
||||||
|
|
||||||
Uses config.yaml as config file.
|
|
||||||
Uses a SQLite database."""
|
|
||||||
|
|
||||||
app_env: str = "dev"
|
|
||||||
yaml_file: str = "config.yaml"
|
|
||||||
db_pool_size: Optional[int] = None
|
|
||||||
db_max_overflow: Optional[int] = None
|
|
||||||
database_url: str = "sqlite:////tmp/argos.db"
|
|
||||||
|
|
||||||
|
|
||||||
class TestSettings(Settings):
|
|
||||||
"""Settings for test environment.
|
|
||||||
|
|
||||||
Uses tests/config.yaml as config file.
|
|
||||||
Uses a SQLite database."""
|
|
||||||
|
|
||||||
app_env: str = "test"
|
|
||||||
yaml_file: str = "tests/config.yaml"
|
|
||||||
database_url: str = "sqlite:////tmp/test-argos.db"
|
|
||||||
db_pool_size: Optional[int] = None
|
|
||||||
db_max_overflow: Optional[int] = None
|
|
||||||
|
|
||||||
|
|
||||||
class ProdSettings(Settings):
|
|
||||||
"""Settings for prod environment."""
|
|
||||||
|
|
||||||
app_env: str = "prod"
|
|
||||||
db_pool_size: Optional[int] = 10
|
|
||||||
db_max_overflow: Optional[int] = 20
|
|
||||||
|
|
||||||
|
|
||||||
environments = {
|
|
||||||
"dev": DevSettings,
|
|
||||||
"prod": ProdSettings,
|
|
||||||
"test": TestSettings,
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
@lru_cache()
|
|
||||||
def get_app_settings() -> Union[None, Settings]:
|
|
||||||
"""Load settings depending on the environment"""
|
|
||||||
app_env = environ.get("ARGOS_APP_ENV", "dev")
|
|
||||||
settings = environments.get(app_env)
|
|
||||||
if settings is not None:
|
|
||||||
return settings()
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def read_yaml_config(filename):
|
|
||||||
parsed = _load_yaml(filename)
|
parsed = _load_yaml(filename)
|
||||||
return Config(**parsed)
|
return Config(**parsed)
|
||||||
|
|
||||||
|
|
||||||
def _load_yaml(filename):
|
def _load_yaml(filename: str):
|
||||||
base_dir = os.path.dirname(filename)
|
base_dir = Path(filename).resolve().parent
|
||||||
YamlIncludeConstructor.add_to_loader_class(
|
YamlIncludeConstructor.add_to_loader_class(
|
||||||
loader_class=yaml.FullLoader, base_dir=base_dir
|
loader_class=yaml.FullLoader, base_dir=str(base_dir)
|
||||||
)
|
)
|
||||||
|
|
||||||
with open(filename, "r", encoding="utf-8") as stream:
|
with open(filename, "r", encoding="utf-8") as stream:
|
||||||
|
|
7
argos/server/static/pico.min.css
vendored
7
argos/server/static/pico.min.css
vendored
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
|
@ -1,19 +1,32 @@
|
||||||
@import url("pico.min.css");
|
@import url("pico.min.css");
|
||||||
|
|
||||||
|
.display-small {
|
||||||
|
display: none;
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
@media (max-width: 767px) {
|
||||||
|
.display-large {
|
||||||
|
display: none !important;
|
||||||
|
}
|
||||||
|
.display-small {
|
||||||
|
display: block;
|
||||||
|
}
|
||||||
|
.display-small article {
|
||||||
|
display: inline-block;
|
||||||
|
width: 24%;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
code {
|
code {
|
||||||
white-space: pre-wrap;
|
white-space: pre-wrap;
|
||||||
}
|
}
|
||||||
|
|
||||||
body > header,
|
|
||||||
body > main {
|
|
||||||
padding: 0 !important;
|
|
||||||
}
|
|
||||||
#title {
|
#title {
|
||||||
margin-bottom: 0;
|
margin-bottom: 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
h2 {
|
h2 {
|
||||||
margin-bottom: calc(var(--typography-spacing-vertical) * 0.5);
|
margin-bottom: calc(var(--pico-typography-spacing-vertical) * 0.5);
|
||||||
}
|
}
|
||||||
|
|
||||||
.grid-index {
|
.grid-index {
|
||||||
|
@ -26,10 +39,12 @@ h2 {
|
||||||
.grid-index article {
|
.grid-index article {
|
||||||
margin-top: 0;
|
margin-top: 0;
|
||||||
margin-bottom: 1rem;
|
margin-bottom: 1rem;
|
||||||
padding-bottom: calc(var(--block-spacing-vertical) * 0.7);
|
padding-bottom: calc(var(--pico-block-spacing-vertical) * 2.4);
|
||||||
}
|
}
|
||||||
.grid-index article > header {
|
.grid-index article > header {
|
||||||
margin-bottom: calc(var(--block-spacing-vertical) * 0.7);
|
padding-top: calc(var(--pico-block-spacing-vertical) * 2.4);
|
||||||
|
padding-bottom: calc(var(--pico-block-spacing-vertical) * 2.4);
|
||||||
|
margin-bottom: calc(var(--pico-block-spacing-vertical) * 2.4);
|
||||||
}
|
}
|
||||||
|
|
||||||
label[for="select-status"] {
|
label[for="select-status"] {
|
||||||
|
@ -51,3 +66,7 @@ label[for="select-status"] {
|
||||||
#refresh-delay {
|
#refresh-delay {
|
||||||
max-width: 120px;
|
max-width: 120px;
|
||||||
}
|
}
|
||||||
|
/* Remove chevron on menu */
|
||||||
|
#nav-menu summary::after {
|
||||||
|
background-image: none !important;
|
||||||
|
}
|
||||||
|
|
|
@ -3,6 +3,10 @@
|
||||||
<head>
|
<head>
|
||||||
<meta charset="utf-8">
|
<meta charset="utf-8">
|
||||||
<title>Argos</title>
|
<title>Argos</title>
|
||||||
|
<meta name="description"
|
||||||
|
content="Argos monitoring">
|
||||||
|
<meta name="keywords"
|
||||||
|
content="argos, monitoring">
|
||||||
<link rel="shortcut icon"
|
<link rel="shortcut icon"
|
||||||
href="{{ url_for('static', path='/logo.png') }}">
|
href="{{ url_for('static', path='/logo.png') }}">
|
||||||
<meta name="viewport"
|
<meta name="viewport"
|
||||||
|
@ -12,14 +16,14 @@
|
||||||
{% if auto_refresh_enabled %}
|
{% if auto_refresh_enabled %}
|
||||||
<meta http-equiv="refresh"
|
<meta http-equiv="refresh"
|
||||||
content="{{ auto_refresh_seconds }}">
|
content="{{ auto_refresh_seconds }}">
|
||||||
{% endif %}
|
{%- endif %}
|
||||||
<link rel="stylesheet"
|
<link rel="stylesheet"
|
||||||
href="{{ url_for('static', path='/styles.css') }}">
|
href="{{ url_for('static', path='/styles.css') }}">
|
||||||
</head>
|
</head>
|
||||||
<body>
|
<body>
|
||||||
<header class="container">
|
<header class="container">
|
||||||
<nav>
|
<nav>
|
||||||
<a href="{{ url_for('get_severity_counts_view') }}">
|
<a href="{{ url_for("get_severity_counts_view") }}">
|
||||||
<ul>
|
<ul>
|
||||||
<li>
|
<li>
|
||||||
<img src="{{ url_for('static', path='/logo-64.png') }}"
|
<img src="{{ url_for('static', path='/logo-64.png') }}"
|
||||||
|
@ -32,31 +36,67 @@
|
||||||
</li>
|
</li>
|
||||||
</ul>
|
</ul>
|
||||||
</a>
|
</a>
|
||||||
|
{% if request.url.remove_query_params('msg') != url_for('login_view') %}
|
||||||
<ul>
|
<ul>
|
||||||
<li>
|
<li>
|
||||||
<a href="{{ url_for('get_severity_counts_view') }}"
|
<details id="nav-menu" class="dropdown">
|
||||||
class="outline {{ 'contrast' if request.url == url_for('get_severity_counts_view') }}"
|
<summary autofocus>Menu</summary>
|
||||||
role="button">
|
<ul dir="rtl">
|
||||||
Dashboard
|
<li>
|
||||||
</a>
|
<a href="{{ url_for('get_severity_counts_view') }}"
|
||||||
<a href="{{ url_for('get_domains_view') }}"
|
class="outline {{ 'contrast' if request.url == url_for('get_severity_counts_view') }}"
|
||||||
class="outline {{ 'contrast' if request.url == url_for('get_domains_view') }}"
|
role="button">
|
||||||
role="button">
|
Dashboard
|
||||||
Domains
|
</a>
|
||||||
</a>
|
</li>
|
||||||
<a href="{{ url_for('get_agents_view') }}"
|
<li>
|
||||||
class="outline {{ 'contrast' if request.url == url_for('get_agents_view') }}"
|
<a href="{{ url_for('get_domains_view') }}"
|
||||||
role="button">
|
class="outline {{ 'contrast' if request.url == url_for('get_domains_view') }}"
|
||||||
Agents
|
role="button">
|
||||||
</a>
|
Domains
|
||||||
<a href="#"
|
</a>
|
||||||
id="reschedule-all"
|
</li>
|
||||||
class="outline"
|
<li>
|
||||||
title="Reschedule non-ok checks as soon as possible">
|
<a href="{{ url_for('get_agents_view') }}"
|
||||||
🕐
|
class="outline {{ 'contrast' if request.url == url_for('get_agents_view') }}"
|
||||||
</a>
|
role="button">
|
||||||
|
Agents
|
||||||
|
</a>
|
||||||
|
</li>
|
||||||
|
{% set unauthenticated_access = request.app.state.config.general.unauthenticated_access %}
|
||||||
|
{% if (user is defined and user is not none) or unauthenticated_access == "all" %}
|
||||||
|
<li>
|
||||||
|
<a href="#"
|
||||||
|
id="reschedule-all"
|
||||||
|
class="outline"
|
||||||
|
title="Reschedule non-ok checks as soon as possible"
|
||||||
|
role="button">
|
||||||
|
Reschedule non-ok checks
|
||||||
|
</a>
|
||||||
|
</li>
|
||||||
|
{% endif %}
|
||||||
|
{% if user is defined and user is not none %}
|
||||||
|
<li>
|
||||||
|
<a href="{{ url_for('logout_view') }}"
|
||||||
|
class="outline }}"
|
||||||
|
role="button">
|
||||||
|
Logout
|
||||||
|
</a>
|
||||||
|
</li>
|
||||||
|
{% elif unauthenticated_access != "all" %}
|
||||||
|
<li>
|
||||||
|
<a href="{{ url_for('login_view') }}"
|
||||||
|
class="outline }}"
|
||||||
|
role="button">
|
||||||
|
Login
|
||||||
|
</a>
|
||||||
|
</li>
|
||||||
|
{% endif %}
|
||||||
|
</ul>
|
||||||
|
</details>
|
||||||
</li>
|
</li>
|
||||||
</ul>
|
</ul>
|
||||||
|
{% endif %}
|
||||||
</nav>
|
</nav>
|
||||||
</header>
|
</header>
|
||||||
<main class="container">
|
<main class="container">
|
||||||
|
@ -71,30 +111,33 @@
|
||||||
</div>
|
</div>
|
||||||
</main>
|
</main>
|
||||||
<footer class="text-center">
|
<footer class="text-center">
|
||||||
<a href="https://framasoft.frama.io/framaspace/argos/">Argos Panoptès</a>,
|
<a href="https://argos-monitoring.framasoft.org/">Argos Panoptès</a>,
|
||||||
<a href="https://framagit.org/framasoft/framaspace/argos/-/blob/main/LICENSE">AGPLv3</a>
|
<a href="https://framagit.org/framasoft/framaspace/argos/-/blob/main/LICENSE">AGPLv3</a>
|
||||||
(<a href="https://framagit.org/framasoft/framaspace/argos">sources</a>)
|
(<a href="https://framagit.org/framasoft/framaspace/argos">sources</a>)
|
||||||
<br/>
|
<br>
|
||||||
API documentation:
|
API documentation:
|
||||||
<a href="{{ url_for('get_severity_counts_view') }}docs">Swagger</a>
|
<a href="{{ url_for("get_severity_counts_view") }}docs">Swagger</a>
|
||||||
or
|
or
|
||||||
<a href="{{ url_for('get_severity_counts_view') }}redoc">Redoc</a>
|
<a href="{{ url_for("get_severity_counts_view") }}redoc">Redoc</a>
|
||||||
</footer>
|
</footer>
|
||||||
<script>
|
{% if request.url.remove_query_params('msg') != url_for('login_view') %}
|
||||||
async function rescheduleAll() {
|
<script>
|
||||||
const response = await fetch('{{ url_for("reschedule_all") }}', {method: 'POST'});
|
async function rescheduleAll() {
|
||||||
const json = await response.json();
|
const response = await fetch('{{ url_for("reschedule_all") }}', {method: 'POST'});
|
||||||
const dialog = document.getElementById('msg');
|
const json = await response.json();
|
||||||
dialog.innerText = json.msg;
|
const dialog = document.getElementById('msg');
|
||||||
dialog.setAttribute('open', '');
|
dialog.innerText = json.msg;
|
||||||
setTimeout(() => {
|
dialog.setAttribute('open', '');
|
||||||
dialog.removeAttribute('open');
|
setTimeout(() => {
|
||||||
}, 1500);
|
dialog.removeAttribute('open');
|
||||||
}
|
}, 1500);
|
||||||
document.getElementById('reschedule-all').addEventListener('click', event => {
|
}
|
||||||
event.preventDefault();
|
document.getElementById('reschedule-all').addEventListener('click', event => {
|
||||||
rescheduleAll();
|
event.preventDefault();
|
||||||
});
|
rescheduleAll();
|
||||||
</script>
|
document.getElementById('nav-menu').open = false;
|
||||||
|
});
|
||||||
|
</script>
|
||||||
|
{% endif %}
|
||||||
</body>
|
</body>
|
||||||
</html>
|
</html>
|
||||||
|
|
|
@ -1,24 +1,23 @@
|
||||||
{% extends "base.html" %}
|
{% extends "base.html" %}
|
||||||
{% block title %}<h2>{{ domain }}</h2>{% endblock title %}
|
{% block title %}<h2>{{ domain }}</h2>{% endblock title %}
|
||||||
{% block content %}
|
{% block content %}
|
||||||
<div id="domains" class="frame">
|
<div id="domains" class="overflow-auto">
|
||||||
<table id="domains-list" role="grid">
|
<table id="domains-list" role="grid" class="striped">
|
||||||
<thead>
|
<thead>
|
||||||
<tr>
|
<tr>
|
||||||
<th>URL</th>
|
<th scope="col">URL</th>
|
||||||
<th>Check</th>
|
<th scope="col">Check</th>
|
||||||
<th>Expected</th>
|
<th scope="col">Current status</th>
|
||||||
<th>Current status</th>
|
<th scope="col">Expected</th>
|
||||||
<th></th>
|
<th scope="col"></th>
|
||||||
</tr>
|
</tr>
|
||||||
</thead>
|
</thead>
|
||||||
|
|
||||||
<tbody id="domains-body">
|
<tbody id="domains-body">
|
||||||
{% for task in tasks %}
|
{% for task in tasks %}
|
||||||
<tr>
|
<tr scope="row">
|
||||||
<td>{{ task.url }}</td>
|
<td>{{ task.url }} (IPv{{ task.ip_version }})</td>
|
||||||
<td>{{ task.check }}</td>
|
<td>{{ task.check }}</td>
|
||||||
<td>{{ task.expected }}</td>
|
|
||||||
<td class="status highlight">
|
<td class="status highlight">
|
||||||
{% if task.status %}
|
{% if task.status %}
|
||||||
<a data-tooltip="Completed at {{ task.completed_at }}"
|
<a data-tooltip="Completed at {{ task.completed_at }}"
|
||||||
|
@ -37,6 +36,7 @@
|
||||||
Waiting to be checked
|
Waiting to be checked
|
||||||
{% endif %}
|
{% endif %}
|
||||||
</td>
|
</td>
|
||||||
|
<td>{{ task.expected }}</td>
|
||||||
<td><a href="{{ url_for('get_task_results_view', task_id=task.id) }}">view all</a></td>
|
<td><a href="{{ url_for('get_task_results_view', task_id=task.id) }}">view all</a></td>
|
||||||
</tr>
|
</tr>
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
|
|
|
@ -12,15 +12,25 @@
|
||||||
</a>
|
</a>
|
||||||
</li>
|
</li>
|
||||||
</ul>
|
</ul>
|
||||||
<ul>
|
{# djlint:off H021 #}
|
||||||
|
<ul id="js-only" style="display: none; ">{# djlint:on #}
|
||||||
|
<li>
|
||||||
|
<input id="domain-search"
|
||||||
|
type="search"
|
||||||
|
spellcheck="false"
|
||||||
|
placeholder="Filter domains list"
|
||||||
|
aria-label="Filter domains list"
|
||||||
|
/>
|
||||||
|
</li>
|
||||||
<li>
|
<li>
|
||||||
<label for="select-status">Show domains with status:</label>
|
<label for="select-status">Show domains with status:</label>
|
||||||
<select id="select-status">
|
<select id="select-status">
|
||||||
<option value="all">All</option>
|
<option value="not-ok" selected>Not OK</option>
|
||||||
<option value="ok">✅ OK</option>
|
<option value="ok">✅ OK</option>
|
||||||
<option value="warning">⚠️ Warning</option>
|
<option value="warning">⚠️ Warning</option>
|
||||||
<option value="critical">❌ Critical</option>
|
<option value="critical">❌ Critical</option>
|
||||||
<option value="unknown">❔ Unknown</option>
|
<option value="unknown">❔ Unknown</option>
|
||||||
|
<option value="all">All</option>
|
||||||
</select>
|
</select>
|
||||||
</li>
|
</li>
|
||||||
</ul>
|
</ul>
|
||||||
|
@ -36,7 +46,8 @@
|
||||||
|
|
||||||
<tbody id="domains-body">
|
<tbody id="domains-body">
|
||||||
{% for (domain, status) in domains %}
|
{% for (domain, status) in domains %}
|
||||||
<tr data-status={{ status }}>
|
<tr data-status="{{ status }}"
|
||||||
|
data-domain="{{ domain }}">
|
||||||
<td>
|
<td>
|
||||||
<a href="{{ url_for('get_domain_tasks_view', domain=domain) }}">
|
<a href="{{ url_for('get_domain_tasks_view', domain=domain) }}">
|
||||||
{{ domain }}
|
{{ domain }}
|
||||||
|
@ -60,20 +71,47 @@
|
||||||
</table>
|
</table>
|
||||||
</div>
|
</div>
|
||||||
<script>
|
<script>
|
||||||
document.getElementById('select-status').addEventListener('change', (e) => {
|
function filterDomains() {
|
||||||
if (e.currentTarget.value === 'all') {
|
let status = document.getElementById('select-status');
|
||||||
|
let filter = document.getElementById('domain-search').value;
|
||||||
|
console.log(filter)
|
||||||
|
if (status.value === 'all') {
|
||||||
document.querySelectorAll('[data-status]').forEach((item) => {
|
document.querySelectorAll('[data-status]').forEach((item) => {
|
||||||
item.style.display = null;
|
if (filter && item.dataset.domain.indexOf(filter) == -1) {
|
||||||
|
item.style.display = 'none';
|
||||||
|
} else {
|
||||||
|
item.style.display = null;
|
||||||
|
}
|
||||||
|
})
|
||||||
|
} else if (status.value === 'not-ok') {
|
||||||
|
document.querySelectorAll('[data-status]').forEach((item) => {
|
||||||
|
if (item.dataset.status !== 'ok') {
|
||||||
|
if (filter && item.dataset.domain.indexOf(filter) == -1) {
|
||||||
|
item.style.display = 'none';
|
||||||
|
} else {
|
||||||
|
item.style.display = null;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
item.style.display = 'none';
|
||||||
|
}
|
||||||
})
|
})
|
||||||
} else {
|
} else {
|
||||||
document.querySelectorAll('[data-status]').forEach((item) => {
|
document.querySelectorAll('[data-status]').forEach((item) => {
|
||||||
if (item.dataset.status === e.currentTarget.value) {
|
if (item.dataset.status === status.value) {
|
||||||
item.style.display = null;
|
if (filter && item.dataset.domain.indexOf(filter) == -1) {
|
||||||
|
item.style.display = 'none';
|
||||||
|
} else {
|
||||||
|
item.style.display = null;
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
item.style.display = 'none';
|
item.style.display = 'none';
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
});
|
}
|
||||||
|
document.getElementById('select-status').addEventListener('change', filterDomains);
|
||||||
|
document.getElementById('domain-search').addEventListener('input', filterDomains);
|
||||||
|
filterDomains()
|
||||||
|
document.getElementById('js-only').style.display = null;
|
||||||
</script>
|
</script>
|
||||||
{% endblock content %}
|
{% endblock content %}
|
||||||
|
|
|
@ -1,11 +1,13 @@
|
||||||
{% extends "base.html" %}
|
{% extends "base.html" %}
|
||||||
{% block title %}<h2>Dashboard</h2>{% endblock title %}
|
{% block title %}
|
||||||
|
<h2>Dashboard</h2>
|
||||||
|
{% endblock title %}
|
||||||
{% block content %}
|
{% block content %}
|
||||||
<div id="domains" class="frame">
|
<div id="domains" class="frame">
|
||||||
<nav>
|
<nav>
|
||||||
<ul>
|
<ul>
|
||||||
<li>
|
<li>
|
||||||
<a href="{{ url_for('get_agents_view') }}">
|
<a href="{{ url_for("get_agents_view") }}">
|
||||||
{{ agents | length }} agent{{ 's' if agents | length > 1 }}
|
{{ agents | length }} agent{{ 's' if agents | length > 1 }}
|
||||||
</a>
|
</a>
|
||||||
</li>
|
</li>
|
||||||
|
@ -21,46 +23,77 @@
|
||||||
</li>
|
</li>
|
||||||
<li>
|
<li>
|
||||||
<label class="inline-label">
|
<label class="inline-label">
|
||||||
Every <input id="refresh-delay"
|
Every
|
||||||
class="initial-width"
|
<input id="refresh-delay"
|
||||||
name="auto_refresh_seconds"
|
class="initial-width"
|
||||||
type="number"
|
name="auto_refresh_seconds"
|
||||||
form="refresh-form"
|
type="number"
|
||||||
min="1"
|
form="refresh-form"
|
||||||
value="{{ auto_refresh_seconds }}"> seconds
|
min="5"
|
||||||
|
value="{{ auto_refresh_seconds }}">
|
||||||
|
seconds
|
||||||
</label>
|
</label>
|
||||||
</li>
|
</li>
|
||||||
<li>
|
<li>
|
||||||
<form id="refresh-form"
|
<form id="refresh-form"
|
||||||
method="post"
|
method="post"
|
||||||
action="{{ url_for('set_refresh_cookies_view') }}">
|
action="{{ url_for("set_refresh_cookies_view") }}">
|
||||||
<input type="Submit">
|
<input type="Submit">
|
||||||
</form>
|
</form>
|
||||||
</li>
|
</li>
|
||||||
</ul>
|
</ul>
|
||||||
</nav>
|
</nav>
|
||||||
|
|
||||||
<div class="container">
|
<div class="container">
|
||||||
<div class="grid grid-index">
|
<div class="display-small">
|
||||||
|
<article title="Unknown">
|
||||||
|
❔
|
||||||
|
<br>
|
||||||
|
{{ counts_dict['unknown'] }}
|
||||||
|
</article>
|
||||||
|
<article title="OK">
|
||||||
|
✅
|
||||||
|
<br>
|
||||||
|
{{ counts_dict['ok'] }}
|
||||||
|
</article>
|
||||||
|
<article title="Warning">
|
||||||
|
⚠️
|
||||||
|
<br>
|
||||||
|
{{ counts_dict['warning'] }}
|
||||||
|
</article>
|
||||||
|
<article title="Critical">
|
||||||
|
❌
|
||||||
|
<br>
|
||||||
|
{{ counts_dict['critical'] }}
|
||||||
|
</article>
|
||||||
|
</div>
|
||||||
|
<div class="grid grid-index display-large">
|
||||||
<article>
|
<article>
|
||||||
<header title="Unknown">❔</header>
|
<header title="Unknown">
|
||||||
|
❔
|
||||||
|
</header>
|
||||||
{{ counts_dict['unknown'] }}
|
{{ counts_dict['unknown'] }}
|
||||||
</article>
|
</article>
|
||||||
<article>
|
<article>
|
||||||
<header title="OK">✅</header>
|
<header title="OK">
|
||||||
|
✅
|
||||||
|
</header>
|
||||||
{{ counts_dict['ok'] }}
|
{{ counts_dict['ok'] }}
|
||||||
</article>
|
</article>
|
||||||
<article>
|
<article>
|
||||||
<header title="Warning">⚠️</header>
|
<header title="Warning">
|
||||||
|
⚠️
|
||||||
|
</header>
|
||||||
{{ counts_dict['warning'] }}
|
{{ counts_dict['warning'] }}
|
||||||
</article>
|
</article>
|
||||||
<article>
|
<article>
|
||||||
<header title="Critical">❌</header>
|
<header title="Critical">
|
||||||
|
❌
|
||||||
|
</header>
|
||||||
{{ counts_dict['critical'] }}
|
{{ counts_dict['critical'] }}
|
||||||
</article>
|
</article>
|
||||||
</div>
|
</div>
|
||||||
<p class="text-center">
|
<p class="text-center">
|
||||||
<a href="{{ url_for('get_domains_view') }}"
|
<a href="{{ url_for("get_domains_view") }}"
|
||||||
class="outline"
|
class="outline"
|
||||||
role="button">
|
role="button">
|
||||||
Domains
|
Domains
|
||||||
|
|
33
argos/server/templates/login.html
Normal file
33
argos/server/templates/login.html
Normal file
|
@ -0,0 +1,33 @@
|
||||||
|
{% extends "base.html" %}
|
||||||
|
{% block title %}<h2>Login</h2>{% endblock title %}
|
||||||
|
{% block content %}
|
||||||
|
{% if msg is not none %}
|
||||||
|
<article>{{ msg }}</article>
|
||||||
|
{% endif %}
|
||||||
|
<div class="frame">
|
||||||
|
<label for="username">Username</label>
|
||||||
|
<input id="username"
|
||||||
|
name="username"
|
||||||
|
type="text"
|
||||||
|
form="login"
|
||||||
|
autofocus>
|
||||||
|
<label for="password">Password</label>
|
||||||
|
<input id="password"
|
||||||
|
name="password"
|
||||||
|
type="password"
|
||||||
|
form="login">
|
||||||
|
{% if remember is not none %}
|
||||||
|
<label>
|
||||||
|
<input type="checkbox"
|
||||||
|
name="rememberme"
|
||||||
|
form="login">
|
||||||
|
Remember me
|
||||||
|
</label>
|
||||||
|
{% endif %}
|
||||||
|
<form id="login"
|
||||||
|
method="post"
|
||||||
|
action="{{ url_for('post_login') }}">
|
||||||
|
<input type="Submit">
|
||||||
|
</form>
|
||||||
|
</div>
|
||||||
|
{% endblock content %}
|
|
@ -1,9 +1,13 @@
|
||||||
{% extends "base.html" %}
|
{% extends "base.html" %}
|
||||||
{% block title %}<h2>{{ result }}</h2>{% endblock title %}
|
{% block title %}<h2>Result {{ result.id }} - {{ result.status }}</h2>{% endblock title %}
|
||||||
{% block content %}
|
{% block content %}
|
||||||
<dl>
|
<dl>
|
||||||
<dt>Task</dt>
|
<dt>Task</dt>
|
||||||
<dd>{{ result.task }}</dd>
|
<dd>
|
||||||
|
<a href="{{ url_for('get_task_results_view', task_id=result.task.id) }}">
|
||||||
|
{{ result.task }}
|
||||||
|
</a>
|
||||||
|
</dd>
|
||||||
<dt>Submitted at</dt>
|
<dt>Submitted at</dt>
|
||||||
<dd>{{ result.submitted_at }}</dd>
|
<dd>{{ result.submitted_at }}</dd>
|
||||||
<dt>Status</dt>
|
<dt>Status</dt>
|
||||||
|
@ -11,6 +15,26 @@
|
||||||
<dt>Severity</dt>
|
<dt>Severity</dt>
|
||||||
<dd>{{ result.severity }}</dd>
|
<dd>{{ result.severity }}</dd>
|
||||||
<dt>Context</dt>
|
<dt>Context</dt>
|
||||||
<dd>{{ result.context }}</dd>
|
<dd>
|
||||||
|
{% if result.status != error %}
|
||||||
|
{{ result.context }}
|
||||||
|
{% else %}
|
||||||
|
<dl>
|
||||||
|
{% if result.context['error_message'] %}
|
||||||
|
<dt>Error message</dt>
|
||||||
|
<dd>{{ result.context['error_message'] }}</dd>
|
||||||
|
{% endif %}
|
||||||
|
<dt>Error type</dt>
|
||||||
|
<dd>{{ result.context['error_type'] }}</dd>
|
||||||
|
<dt>Error details</dt>
|
||||||
|
<dd>
|
||||||
|
<details>
|
||||||
|
<summary>{{ result.context['error_details'] | truncate(120, False, '…') }} (click to expand)</summary>
|
||||||
|
<pre><code>{{ result.context['error_details'] | replace('\n', '<br>') | safe }}</code></pre>
|
||||||
|
</details>
|
||||||
|
</dd>
|
||||||
|
</dl>
|
||||||
|
{% endif %}
|
||||||
|
</dd>
|
||||||
</dl>
|
</dl>
|
||||||
{% endblock content %}
|
{% endblock content %}
|
||||||
|
|
|
@ -13,14 +13,37 @@
|
||||||
</thead>
|
</thead>
|
||||||
<tbody>
|
<tbody>
|
||||||
{% for result in results %}
|
{% for result in results %}
|
||||||
<tr>
|
<tr id="{{ result.id }}">
|
||||||
<td>{{ result.submitted_at }}</td>
|
<td>
|
||||||
|
<a href="{{ url_for('get_result_view', result_id=result.id) }}" title="See details of result {{ result.id }}">
|
||||||
|
{{ result.submitted_at }}
|
||||||
|
</a>
|
||||||
|
</td>
|
||||||
<td>{{ result.status }}</td>
|
<td>{{ result.status }}</td>
|
||||||
<td>{{ result.severity }}</td>
|
<td>{{ result.severity }}</td>
|
||||||
<td>{{ result.context }}</td>
|
<td>
|
||||||
|
{% if result.status != error %}
|
||||||
|
{{ result.context }}
|
||||||
|
{% else %}
|
||||||
|
<dl>
|
||||||
|
{% if result.context["error_message"] %}
|
||||||
|
<dt>Error message</dt>
|
||||||
|
<dd>{{ result.context["error_message"] }}</dd>
|
||||||
|
{% endif %}
|
||||||
|
<dt>Error type</dt>
|
||||||
|
<dd>{{ result.context["error_type"] }}</dd>
|
||||||
|
<dt>Error details</dt>
|
||||||
|
<dd>
|
||||||
|
<details>
|
||||||
|
<summary>{{ result.context["error_details"] | truncate(120, False, "…") }} (click to expand)</summary>
|
||||||
|
<pre><code>{{ result.context["error_details"] | replace("\n", "<br>") | safe }}</code></pre>
|
||||||
|
</details>
|
||||||
|
</dd>
|
||||||
|
</dl>
|
||||||
|
{% endif %}
|
||||||
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
{% endfor %}
|
{% endfor %}
|
||||||
</tbody>
|
</tbody>
|
||||||
</table>
|
</table>
|
||||||
|
|
||||||
{% endblock content %}
|
{% endblock content %}
|
||||||
|
|
|
@ -1,4 +1,7 @@
|
||||||
ARGOS_YAML_FILE = "my-config.yaml"
|
ARGOS_YAML_FILE="my-config.yaml"
|
||||||
ARGOS_DATABASE_URL = "postgresql://argos:argos@localhost/argos"
|
ARGOS_DATABASE_URL="postgresql://argos:argos@localhost/argos"
|
||||||
DB_POOL_SIZE = 10 # https://docs.sqlalchemy.org/en/20/core/pooling.html#sqlalchemy.pool.QueuePool.params.pool_size
|
|
||||||
DB_MAX_OVERFLOW = 20
|
# https://docs.sqlalchemy.org/en/20/core/pooling.html#sqlalchemy.pool.QueuePool.params.pool_size
|
||||||
|
DB_POOL_SIZE=10
|
||||||
|
# https://docs.sqlalchemy.org/en/20/core/pooling.html#sqlalchemy.pool.QueuePool.params.max_overflow
|
||||||
|
DB_MAX_OVERFLOW=20
|
||||||
|
|
|
@ -1,63 +0,0 @@
|
||||||
general:
|
|
||||||
frequency: "1m" # Run checks every minute.
|
|
||||||
# Which way do you want to be warned when a check goes to that severity?
|
|
||||||
alerts:
|
|
||||||
ok:
|
|
||||||
- local
|
|
||||||
warning:
|
|
||||||
- local
|
|
||||||
critical:
|
|
||||||
- local
|
|
||||||
unknown:
|
|
||||||
- local
|
|
||||||
# mail:
|
|
||||||
# mailfrom: no-reply@example.org
|
|
||||||
# host: 127.0.0.1
|
|
||||||
# port: 25
|
|
||||||
# ssl: False
|
|
||||||
# starttls: False
|
|
||||||
# auth:
|
|
||||||
# login: foo
|
|
||||||
# password: bar
|
|
||||||
# addresses:
|
|
||||||
# - foo@admin.example.org
|
|
||||||
# - bar@admin.example.org
|
|
||||||
# gotify:
|
|
||||||
# - url: https://example.org
|
|
||||||
# tokens:
|
|
||||||
# - foo
|
|
||||||
# - bar
|
|
||||||
|
|
||||||
service:
|
|
||||||
secrets:
|
|
||||||
# Secrets can be generated using `openssl rand -base64 32`.
|
|
||||||
|
|
||||||
ssl:
|
|
||||||
thresholds:
|
|
||||||
- "1d": critical
|
|
||||||
- "5d": warning
|
|
||||||
|
|
||||||
# It's also possible to define the checks in another file
|
|
||||||
# with the include syntax:
|
|
||||||
#
|
|
||||||
# websites: !include websites.yaml
|
|
||||||
#
|
|
||||||
websites:
|
|
||||||
- domain: "https://mypads.example.org"
|
|
||||||
paths:
|
|
||||||
- path: "/mypads/"
|
|
||||||
checks:
|
|
||||||
- status-is: 200
|
|
||||||
- body-contains: '<div id= "mypads"></div>'
|
|
||||||
- ssl-certificate-expiration: "on-check"
|
|
||||||
- path: "/admin/"
|
|
||||||
checks:
|
|
||||||
- status-is: 401
|
|
||||||
- domain: "https://munin.example.org"
|
|
||||||
paths:
|
|
||||||
- path: "/"
|
|
||||||
checks:
|
|
||||||
- status-is: 301
|
|
||||||
- path: "/munin/"
|
|
||||||
checks:
|
|
||||||
- status-is: 401
|
|
1
conf/config-example.yaml
Symbolic link
1
conf/config-example.yaml
Symbolic link
|
@ -0,0 +1 @@
|
||||||
|
../argos/config-example.yaml
|
5
conf/default-argos-agent
Normal file
5
conf/default-argos-agent
Normal file
|
@ -0,0 +1,5 @@
|
||||||
|
ARGOS_AGENT_TOKEN=Secret
|
||||||
|
ARGOS_AGENT_SERVER_URL=http://127.0.0.1:8000
|
||||||
|
ARGOS_AGENT_LOGLEVEL=WARNING
|
||||||
|
ARGOS_AGENT_MAX_TASKS=20
|
||||||
|
ARGOS_AGENT_WAIT_TIME=10
|
5
conf/default-argos-server
Normal file
5
conf/default-argos-server
Normal file
|
@ -0,0 +1,5 @@
|
||||||
|
ARGOS_YAML_FILE="/etc/argos/config.yaml"
|
||||||
|
ARGOS_SERVER_WORKERS=4
|
||||||
|
ARGOS_SERVER_SOCKET=127.0.0.1:8000
|
||||||
|
# Comma separated list of IP addresses of the web proxy (usually Nginx)
|
||||||
|
ARGOS_SERVER_FORWARDED_ALLOW_IPS=127.0.0.1
|
4
conf/nginx-subdirectory.conf
Normal file
4
conf/nginx-subdirectory.conf
Normal file
|
@ -0,0 +1,4 @@
|
||||||
|
location /foo/ {
|
||||||
|
include proxy_params;
|
||||||
|
proxy_pass http://127.0.0.1:8000/;
|
||||||
|
}
|
|
@ -9,21 +9,15 @@ server {
|
||||||
ssl_certificate /etc/letsencrypt/live/argos.example.org/fullchain.pem;
|
ssl_certificate /etc/letsencrypt/live/argos.example.org/fullchain.pem;
|
||||||
ssl_certificate_key /etc/letsencrypt/live/argos.example.org/privkey.pem;
|
ssl_certificate_key /etc/letsencrypt/live/argos.example.org/privkey.pem;
|
||||||
|
|
||||||
access_log /var/log/nginx/argos.example.org.access.log;
|
access_log /var/log/nginx/argos.example.org.access.log;
|
||||||
error_log /var/log/nginx/argos.example.org.error.log;
|
error_log /var/log/nginx/argos.example.org.error.log;
|
||||||
|
|
||||||
if ($scheme != "https") {
|
if ($scheme != "https") {
|
||||||
rewrite ^ https://$http_host$request_uri? permanent;
|
rewrite ^ https://$http_host$request_uri? permanent;
|
||||||
}
|
}
|
||||||
|
|
||||||
location ~ ^/($|domains?/|result/|task/|refresh/) {
|
|
||||||
auth_basic "Closed site";
|
|
||||||
auth_basic_user_file argos.passwd;
|
|
||||||
include proxy_params;
|
|
||||||
proxy_pass http://127.0.0.1:8000;
|
|
||||||
}
|
|
||||||
location / {
|
location / {
|
||||||
include proxy_params;
|
include proxy_params;
|
||||||
proxy_pass http://127.0.0.1:8000;
|
proxy_pass http://127.0.0.1:8000;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,21 +1,17 @@
|
||||||
[Unit]
|
[Unit]
|
||||||
Description=Argos agent
|
Description=Argos agent
|
||||||
Documentation=https://framasoft.frama.io/framaspace/argos/
|
Documentation=https://argos-monitoring.framasoft.org/
|
||||||
Requires=network.target
|
Requires=network.target
|
||||||
After=network.target
|
After=network.target
|
||||||
|
|
||||||
[Service]
|
[Service]
|
||||||
User=www-data
|
User=argos
|
||||||
Environment="ARGOS_AGENT_TOKEN=Secret"
|
EnvironmentFile=/etc/default/argos-agent
|
||||||
Environment="ARGOS_AGENT_SERVER_URL=http://127.0.0.1:8000"
|
WorkingDirectory=/opt/argos/
|
||||||
WorkingDirectory=/var/www/argos/
|
ExecStart=/opt/argos/venv/bin/argos agent --max-tasks $ARGOS_AGENT_MAX_TASKS \
|
||||||
ExecStart=/var/www/argos/venv/bin/argos agent --max-tasks 20 --wait-time 10 --log-level DEBUG
|
--wait-time $ARGOS_AGENT_WAIT_TIME \
|
||||||
|
--log-level $ARGOS_AGENT_LOGLEVEL
|
||||||
SyslogIdentifier=argos-agent
|
SyslogIdentifier=argos-agent
|
||||||
|
|
||||||
[Install]
|
[Install]
|
||||||
WantedBy=multi-user.target
|
WantedBy=multi-user.target
|
||||||
|
|
||||||
# NB: it may be better to
|
|
||||||
# - use a dedicated user
|
|
||||||
# - use a EnvironmentFile=/etc/default/argos-agent in order to enable configuration
|
|
||||||
# changes without doing a systemctl daemon-reload
|
|
||||||
|
|
|
@ -1,24 +1,23 @@
|
||||||
[Unit]
|
[Unit]
|
||||||
Description=Argos server
|
Description=Argos server
|
||||||
Documentation=https://framasoft.frama.io/framaspace/argos/
|
Documentation=https://argos-monitoring.framasoft.org/
|
||||||
Requires=network.target postgresql.service
|
Requires=network.target postgresql.service
|
||||||
After=network.target postgresql.service
|
After=network.target postgresql.service
|
||||||
PartOf=postgresql.service
|
PartOf=postgresql.service
|
||||||
|
|
||||||
[Service]
|
[Service]
|
||||||
User=www-data
|
User=argos
|
||||||
WorkingDirectory=/var/www/argos/
|
WorkingDirectory=/opt/argos/
|
||||||
Environment="ARGOS_SERVER_WORKERS=4"
|
EnvironmentFile=/etc/default/argos-server
|
||||||
Environment="ARGOS_SERVER_SOCKET=127.0.0.1:8000"
|
ExecStartPre=/opt/argos/venv/bin/argos server migrate
|
||||||
ExecStartPre=/var/www/argos/venv/bin/argos server migrate
|
ExecStartPre=/opt/argos/venv/bin/argos server reload-config
|
||||||
ExecStartPre=/var/www/argos/venv/bin/argos server reload-config
|
ExecStart=/opt/argos/venv/bin/gunicorn "argos.server.main:get_application()" \
|
||||||
ExecStart=/var/www/argos/venv/bin/gunicorn "argos.server.main:get_application()" -w $ARGOS_SERVER_WORKERS -k uvicorn.workers.UvicornWorker -b $ARGOS_SERVER_SOCKET
|
--workers $ARGOS_SERVER_WORKERS \
|
||||||
ExecReload=/var/www/argos/venv/bin/argos server reload
|
--worker-class uvicorn.workers.UvicornWorker \
|
||||||
|
--bind $ARGOS_SERVER_SOCKET \
|
||||||
|
--forwarded-allow-ips $ARGOS_SERVER_FORWARDED_ALLOW_IPS
|
||||||
|
ExecReload=/opt/argos/venv/bin/argos server reload-config
|
||||||
SyslogIdentifier=argos-server
|
SyslogIdentifier=argos-server
|
||||||
|
|
||||||
[Install]
|
[Install]
|
||||||
WantedBy=multi-user.target
|
WantedBy=multi-user.target
|
||||||
|
|
||||||
# NB: it may be better to
|
|
||||||
# - use a EnvironmentFile=/etc/default/argos-server in order to enable configuration
|
|
||||||
# changes without doing a systemctl daemon-reload
|
|
||||||
|
|
4
docs/_static/fix-nav.css
vendored
Normal file
4
docs/_static/fix-nav.css
vendored
Normal file
|
@ -0,0 +1,4 @@
|
||||||
|
.sy-head-brand img + strong {
|
||||||
|
display: inline;
|
||||||
|
margin-left: 1em;
|
||||||
|
}
|
1
docs/_static/logo.png
vendored
Symbolic link
1
docs/_static/logo.png
vendored
Symbolic link
|
@ -0,0 +1 @@
|
||||||
|
../../argos/server/static/logo.png
|
|
@ -1,3 +1,6 @@
|
||||||
|
---
|
||||||
|
description: Argos exposes a website and an API. This is how to use the API.
|
||||||
|
---
|
||||||
# The HTTP API
|
# The HTTP API
|
||||||
|
|
||||||
Argos exposes a website and an API. The website is available at "/" and the API at "/api".
|
Argos exposes a website and an API. The website is available at "/" and the API at "/api".
|
||||||
|
|
|
@ -1,2 +1,5 @@
|
||||||
|
---
|
||||||
|
description: Last changes in Argos.
|
||||||
|
---
|
||||||
```{include} ../CHANGELOG.md
|
```{include} ../CHANGELOG.md
|
||||||
```
|
```
|
||||||
|
|
113
docs/checks.md
113
docs/checks.md
|
@ -1,6 +1,9 @@
|
||||||
|
---
|
||||||
|
description: Here are the checks that Argos proposes, with a description of what they do and how to configure them.
|
||||||
|
---
|
||||||
# Checks
|
# Checks
|
||||||
|
|
||||||
At its core, argos runs checks and return the results to the service. Here are the implemented checks, with a description of what they do and how to configure them.
|
At its core, Argos runs checks and return the results to the service. Here are the implemented checks, with a description of what they do and how to configure them.
|
||||||
|
|
||||||
## Simple checks
|
## Simple checks
|
||||||
|
|
||||||
|
@ -8,12 +11,22 @@ These checks are the most basic ones. They simply check that the response from t
|
||||||
|
|
||||||
| Check | Description | Configuration |
|
| Check | Description | Configuration |
|
||||||
| --- | --- | --- |
|
| --- | --- | --- |
|
||||||
| `status-is` | Check that the returned status code matches what you expect. | `status-is: "200"` |
|
| `status-is` | Check that the returned status code matches what you expect. | <pre><code>status-is: \"200\"</code></pre> |
|
||||||
| `body-contains` | Check that the returned body contains a given string. | `body-contains: "Hello world"` |
|
| `status-in` | Check that the returned status code is in the list of codes you expect. | <pre><code>status-in:<br> - 200<br> - 302</code></pre> |
|
||||||
|
| `body-contains` | Check that the returned body contains a given string. | <pre><code>body-contains: "Hello world"</code></pre> |
|
||||||
|
| `body-like` | Check that the returned body matches a given regex. | <pre><code>body-like: "Hel+o w.*"</code></pre> |
|
||||||
|
| `headers-contain` | Check that the response contains the expected headers. | <pre><code>headers-contain:<br> - "content-encoding"<br> - "content-type"</code></pre> |
|
||||||
|
| `headers-have` | Check that the response contains the expected headers with the expected value. | <pre><code>headers-have:<br> content-encoding: "gzip"<br> content-type: "text/html"</code></pre> |
|
||||||
|
| `headers-like` | Check that response headers contains the expected headers and that the values matches the provided regexes. | <pre><code>headers-like:<br> content-encoding: "gzip\|utf"<br> content-type: "text/(html\|css)"</code></pre> |
|
||||||
|
| `json-contains` | Check that JSON response contains the expected structure. | <pre><code>json-contains:<br> - /foo/bar/0<br> - /timestamp</code></pre> |
|
||||||
|
| `json-has` | Check that JSON response contains the expected structure and values. | <pre><code>json-has:<br> /maintenance: false<br> /productname: "Nextcloud"</code></pre> |
|
||||||
|
| `json-like` | Check that JSON response contains the expected structure and that the values matches the provided regexes. | <pre><code>json-like:<br> /productname: ".\*cloud"<br> /versionstring: "29\\\\..\*"</code></pre> |
|
||||||
|
| `json-is` | Check that JSON response is the exact expected JSON object. | <pre><code>json-is: '{"foo": "bar", "baz": 42}'</code></pre> |
|
||||||
|
| `http-to-https` | Check that the HTTP version of the domain redirects to HTTPS. Multiple choices of configuration. | <pre><code>http-to-https: true<br>http-to-https: 301<br>http-to-https:<br> start: 301<br> stop: 308<br>http-to-https:<br> - 301<br> - 302<br> - 307</code></pre> |
|
||||||
|
|
||||||
```{code-block} yaml
|
```{code-block} yaml
|
||||||
---
|
---
|
||||||
caption: config.yaml
|
caption: argos-config.yaml
|
||||||
---
|
---
|
||||||
- domain: "https://example.org"
|
- domain: "https://example.org"
|
||||||
paths:
|
paths:
|
||||||
|
@ -21,6 +34,94 @@ caption: config.yaml
|
||||||
checks:
|
checks:
|
||||||
- status-is: 200
|
- status-is: 200
|
||||||
- body-contains: "Hello world"
|
- body-contains: "Hello world"
|
||||||
|
- body-like: "Hel+o w.*"
|
||||||
|
- headers-contain:
|
||||||
|
- "content-encoding"
|
||||||
|
- "content-type"
|
||||||
|
# Check that there is a HTTP to HTTPS redirection with 3xx status code
|
||||||
|
- http-to-https: true
|
||||||
|
# Check that there is a HTTP to HTTPS redirection with 301 status code
|
||||||
|
- http-to-https: 301
|
||||||
|
# Check that there is a HTTP to HTTPS redirection with a status code
|
||||||
|
# in the provided range (stop value excluded)
|
||||||
|
- http-to-https:
|
||||||
|
start: 301
|
||||||
|
stop: 308
|
||||||
|
# Check that there is a HTTP to HTTPS redirection with a status code
|
||||||
|
# in the provided list
|
||||||
|
- http-to-https:
|
||||||
|
- 301
|
||||||
|
- 302
|
||||||
|
- 307
|
||||||
|
- path: "/foobar"
|
||||||
|
checks:
|
||||||
|
- status-in:
|
||||||
|
- 200
|
||||||
|
- 302
|
||||||
|
# It’s VERY important to respect the 4 spaces indentation here!
|
||||||
|
- headers-have:
|
||||||
|
content-encoding: "gzip"
|
||||||
|
content-type: "text/html"
|
||||||
|
# It’s VERY important to respect the 4 spaces indentation here!
|
||||||
|
# You have to double the escape character \
|
||||||
|
- headers-like:
|
||||||
|
content-encoding: "gzip|utf"
|
||||||
|
content-type: "text/(html|css)"
|
||||||
|
- json-contains:
|
||||||
|
- /foo/bar/0
|
||||||
|
- /timestamp
|
||||||
|
# It’s VERY important to respect the 4 spaces indentation here!
|
||||||
|
- json-has:
|
||||||
|
/maintenance: false
|
||||||
|
/productname: "Nextcloud"
|
||||||
|
# It’s VERY important to respect the 4 spaces indentation here!
|
||||||
|
# You have to double the escape character \
|
||||||
|
- json-like:
|
||||||
|
/productname: ".*cloud"
|
||||||
|
/versionstring: "29\\..*"
|
||||||
|
- json-is: '{"foo": "bar", "baz": 42}'
|
||||||
|
```
|
||||||
|
|
||||||
|
## Add data to requests
|
||||||
|
|
||||||
|
If you want to specify query parameters, just put them in the path:
|
||||||
|
|
||||||
|
```{code-block} yaml
|
||||||
|
websites:
|
||||||
|
- domain: "https://contact.example.org"
|
||||||
|
paths:
|
||||||
|
- path: "/index.php?action=show_messages"
|
||||||
|
method: "GET"
|
||||||
|
```
|
||||||
|
|
||||||
|
If you want, for example, to test a form and send some data to it:
|
||||||
|
|
||||||
|
```{code-block} yaml
|
||||||
|
websites:
|
||||||
|
- domain: "https://contact.example.org"
|
||||||
|
paths:
|
||||||
|
- path: "/"
|
||||||
|
method: "POST"
|
||||||
|
request_data:
|
||||||
|
# These are the data sent to the server: title and msg
|
||||||
|
data:
|
||||||
|
title: "Hello my friend"
|
||||||
|
msg: "How are you today?"
|
||||||
|
# To send data as JSON (optional, default is false):
|
||||||
|
is_json: true
|
||||||
|
```
|
||||||
|
|
||||||
|
If you need to send some headers in the request:
|
||||||
|
|
||||||
|
```{code-block} yaml
|
||||||
|
websites:
|
||||||
|
- domain: "https://contact.example.org"
|
||||||
|
paths:
|
||||||
|
- path: "/api/mail"
|
||||||
|
method: "PUT"
|
||||||
|
request_data:
|
||||||
|
headers:
|
||||||
|
Authorization: "Bearer foo-bar-baz"
|
||||||
```
|
```
|
||||||
|
|
||||||
## SSL certificate expiration
|
## SSL certificate expiration
|
||||||
|
@ -30,7 +131,7 @@ caption: config.yaml
|
||||||
|
|
||||||
```{code-block} yaml
|
```{code-block} yaml
|
||||||
---
|
---
|
||||||
caption: config.yaml
|
caption: argos-config.yaml
|
||||||
---
|
---
|
||||||
ssl:
|
ssl:
|
||||||
thresholds:
|
thresholds:
|
||||||
|
@ -42,4 +143,4 @@ ssl:
|
||||||
- path: "/"
|
- path: "/"
|
||||||
checks:
|
checks:
|
||||||
- ssl-certificate-expiration: "on-check"
|
- ssl-certificate-expiration: "on-check"
|
||||||
```
|
```
|
||||||
|
|
413
docs/cli.md
413
docs/cli.md
|
@ -1,3 +1,6 @@
|
||||||
|
---
|
||||||
|
description: How to use Argos from the command line.
|
||||||
|
---
|
||||||
# Command-line interface
|
# Command-line interface
|
||||||
|
|
||||||
<!-- [[[cog
|
<!-- [[[cog
|
||||||
|
@ -26,9 +29,9 @@ Options:
|
||||||
--help Show this message and exit.
|
--help Show this message and exit.
|
||||||
|
|
||||||
Commands:
|
Commands:
|
||||||
agent Get and run tasks to the provided server.
|
agent Get and run tasks for the provided server.
|
||||||
server
|
server Commands for managing server, server’s configuration and users
|
||||||
version
|
version Prints Argos’ version and exits
|
||||||
```
|
```
|
||||||
|
|
||||||
<!--[[[end]]]
|
<!--[[[end]]]
|
||||||
|
@ -43,7 +46,7 @@ Commands:
|
||||||
```man
|
```man
|
||||||
Usage: argos agent [OPTIONS] SERVER_URL AUTH
|
Usage: argos agent [OPTIONS] SERVER_URL AUTH
|
||||||
|
|
||||||
Get and run tasks to the provided server. Will wait for new tasks.
|
Get and run tasks for the provided server. Will wait for new tasks.
|
||||||
|
|
||||||
Usage: argos agent https://argos.example.org "auth-token-here"
|
Usage: argos agent https://argos.example.org "auth-token-here"
|
||||||
|
|
||||||
|
@ -57,7 +60,9 @@ Options:
|
||||||
--max-tasks INTEGER Number of concurrent tasks this agent can run
|
--max-tasks INTEGER Number of concurrent tasks this agent can run
|
||||||
--wait-time INTEGER Waiting time between two polls on the server
|
--wait-time INTEGER Waiting time between two polls on the server
|
||||||
(seconds)
|
(seconds)
|
||||||
--log-level [DEBUG|INFO|WARNING|ERROR|CRITICAL]
|
--log-level [debug|info|warning|error|critical]
|
||||||
|
--user-agent TEXT A custom string to append to the User-Agent
|
||||||
|
header
|
||||||
--help Show this message and exit.
|
--help Show this message and exit.
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -73,15 +78,22 @@ Options:
|
||||||
```man
|
```man
|
||||||
Usage: argos server [OPTIONS] COMMAND [ARGS]...
|
Usage: argos server [OPTIONS] COMMAND [ARGS]...
|
||||||
|
|
||||||
|
Commands for managing server, server’s configuration and users
|
||||||
|
|
||||||
Options:
|
Options:
|
||||||
--help Show this message and exit.
|
--help Show this message and exit.
|
||||||
|
|
||||||
Commands:
|
Commands:
|
||||||
cleandb Clean the database (to run routinely)
|
generate-config Output a self-documented example config file.
|
||||||
generate-token Generate a token for agents
|
generate-token Generate a token for agents
|
||||||
migrate Run database migrations
|
migrate Run database migrations
|
||||||
reload-config Load or reload tasks’ configuration
|
nagios Nagios compatible severities report
|
||||||
start Starts the server (use only for testing or development!)
|
reload-config Load or reload tasks’ configuration
|
||||||
|
start Starts the server (use only for testing or development!)
|
||||||
|
test-apprise Send a test apprise notification
|
||||||
|
test-gotify Send a test gotify notification
|
||||||
|
test-mail Send a test email
|
||||||
|
user User management
|
||||||
```
|
```
|
||||||
|
|
||||||
<!--[[[end]]]
|
<!--[[[end]]]
|
||||||
|
@ -98,14 +110,16 @@ Usage: argos server start [OPTIONS]
|
||||||
|
|
||||||
Starts the server (use only for testing or development!)
|
Starts the server (use only for testing or development!)
|
||||||
|
|
||||||
See https://framasoft.frama.io/framaspace/argos/deployment/systemd.html#server
|
See https://argos-monitoring.framasoft.org/deployment/systemd.html#server for
|
||||||
for advices on how to start the server for production.
|
advices on how to start the server for production.
|
||||||
|
|
||||||
Options:
|
Options:
|
||||||
--host TEXT Host to bind
|
--host TEXT Host to bind
|
||||||
--port INTEGER Port to bind
|
--port INTEGER Port to bind
|
||||||
--config TEXT Path of the configuration file. If ARGOS_YAML_FILE environment
|
--config TEXT Path of the configuration file. If ARGOS_YAML_FILE environment
|
||||||
variable is set, its value will be used instead.
|
variable is set, its value will be used instead. Default
|
||||||
|
value: argos-config.yaml and /etc/argos/config.yaml as
|
||||||
|
fallback.
|
||||||
--reload Enable hot reloading
|
--reload Enable hot reloading
|
||||||
--help Show this message and exit.
|
--help Show this message and exit.
|
||||||
```
|
```
|
||||||
|
@ -127,7 +141,8 @@ Usage: argos server migrate [OPTIONS]
|
||||||
|
|
||||||
Options:
|
Options:
|
||||||
--config TEXT Path of the configuration file. If ARGOS_YAML_FILE environment
|
--config TEXT Path of the configuration file. If ARGOS_YAML_FILE environment
|
||||||
variable is set, its value will be used instead.
|
variable is set, its value will be used instead. Default value:
|
||||||
|
argos-config.yaml and /etc/argos/config.yaml as fallback.
|
||||||
--help Show this message and exit.
|
--help Show this message and exit.
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -135,34 +150,6 @@ Options:
|
||||||
-->
|
-->
|
||||||
|
|
||||||
|
|
||||||
### Server cleandb
|
|
||||||
<!--
|
|
||||||
.. [[[cog
|
|
||||||
help(["server", "cleandb", "--help"])
|
|
||||||
.. ]]] -->
|
|
||||||
|
|
||||||
```man
|
|
||||||
Usage: argos server cleandb [OPTIONS]
|
|
||||||
|
|
||||||
Clean the database (to run routinely)
|
|
||||||
|
|
||||||
- Removes old results from the database.
|
|
||||||
- Removes locks from tasks that have been locked for too long.
|
|
||||||
|
|
||||||
Options:
|
|
||||||
--max-results INTEGER Number of results per task to keep
|
|
||||||
--max-lock-seconds INTEGER The number of seconds after which a lock is
|
|
||||||
considered stale, must be higher than 60 (the
|
|
||||||
checks have a timeout value of 60 seconds)
|
|
||||||
--config TEXT Path of the configuration file. If ARGOS_YAML_FILE
|
|
||||||
environment variable is set, its value will be
|
|
||||||
used instead.
|
|
||||||
--help Show this message and exit.
|
|
||||||
```
|
|
||||||
|
|
||||||
<!--[[[end]]]
|
|
||||||
-->
|
|
||||||
|
|
||||||
### Server reload-config
|
### Server reload-config
|
||||||
|
|
||||||
<!--
|
<!--
|
||||||
|
@ -176,15 +163,43 @@ Usage: argos server reload-config [OPTIONS]
|
||||||
Read tasks’ configuration and add/delete tasks in database if needed
|
Read tasks’ configuration and add/delete tasks in database if needed
|
||||||
|
|
||||||
Options:
|
Options:
|
||||||
--config TEXT Path of the configuration file. If ARGOS_YAML_FILE environment
|
--config TEXT Path of the configuration file. If ARGOS_YAML_FILE
|
||||||
variable is set, its value will be used instead.
|
environment variable is set, its value will be used
|
||||||
--help Show this message and exit.
|
instead. Default value: argos-config.yaml and
|
||||||
|
/etc/argos/config.yaml as fallback.
|
||||||
|
--enqueue / --no-enqueue Let Argos main recurring tasks handle
|
||||||
|
configuration’s loading. It may delay the
|
||||||
|
application of the new configuration up to 2
|
||||||
|
minutes. Default is --no-enqueue
|
||||||
|
--help Show this message and exit.
|
||||||
```
|
```
|
||||||
|
|
||||||
<!--[[[end]]]
|
<!--[[[end]]]
|
||||||
-->
|
-->
|
||||||
|
|
||||||
### Server generate-token command
|
### Server generate-config
|
||||||
|
|
||||||
|
<!--
|
||||||
|
.. [[[cog
|
||||||
|
help(["server", "generate-config", "--help"])
|
||||||
|
.. ]]] -->
|
||||||
|
|
||||||
|
```man
|
||||||
|
Usage: argos server generate-config [OPTIONS]
|
||||||
|
|
||||||
|
Output a self-documented example config file.
|
||||||
|
|
||||||
|
Redirect the output to a file to save it:
|
||||||
|
argos server generate-config > /etc/argos/config.yaml
|
||||||
|
|
||||||
|
Options:
|
||||||
|
--help Show this message and exit.
|
||||||
|
```
|
||||||
|
|
||||||
|
<!--[[[end]]]
|
||||||
|
-->
|
||||||
|
|
||||||
|
### Server generate-token
|
||||||
|
|
||||||
<!--
|
<!--
|
||||||
.. [[[cog
|
.. [[[cog
|
||||||
|
@ -204,3 +219,309 @@ Options:
|
||||||
|
|
||||||
<!--[[[end]]]
|
<!--[[[end]]]
|
||||||
-->
|
-->
|
||||||
|
|
||||||
|
### Server user management
|
||||||
|
|
||||||
|
You can choose to protect Argos’ web interface with a user system, in which case you’ll need to create at least one user.
|
||||||
|
|
||||||
|
See [`unauthenticated_access` in the configuration file](configuration.md) to allow partial or total unauthenticated access to Argos.
|
||||||
|
|
||||||
|
See [`ldap` in the configuration file](configuration.md) to authenticate users against a LDAP server instead of Argos’ database.
|
||||||
|
|
||||||
|
You can manage Argos’ users only through CLI.
|
||||||
|
|
||||||
|
NB: you can’t manage the LDAP users with Argos.
|
||||||
|
|
||||||
|
<!--
|
||||||
|
.. [[[cog
|
||||||
|
help(["server", "user", "--help"])
|
||||||
|
.. ]]] -->
|
||||||
|
|
||||||
|
```man
|
||||||
|
Usage: argos server user [OPTIONS] COMMAND [ARGS]...
|
||||||
|
|
||||||
|
User management
|
||||||
|
|
||||||
|
Options:
|
||||||
|
--help Show this message and exit.
|
||||||
|
|
||||||
|
Commands:
|
||||||
|
add Add new user
|
||||||
|
change-password Change user’s password
|
||||||
|
delete Delete user
|
||||||
|
disable Disable user
|
||||||
|
enable Enable user
|
||||||
|
show List all users
|
||||||
|
verify-password Test user’s password
|
||||||
|
```
|
||||||
|
|
||||||
|
<!--[[[end]]]
|
||||||
|
-->
|
||||||
|
|
||||||
|
#### Add user
|
||||||
|
|
||||||
|
<!--
|
||||||
|
.. [[[cog
|
||||||
|
help(["server", "user", "add", "--help"])
|
||||||
|
.. ]]] -->
|
||||||
|
|
||||||
|
```man
|
||||||
|
Usage: argos server user add [OPTIONS]
|
||||||
|
|
||||||
|
Add new user
|
||||||
|
|
||||||
|
Options:
|
||||||
|
--config TEXT Path of the configuration file. If ARGOS_YAML_FILE
|
||||||
|
environment variable is set, its value will be used instead.
|
||||||
|
--name TEXT Name of the user to create.
|
||||||
|
--password TEXT
|
||||||
|
--help Show this message and exit.
|
||||||
|
```
|
||||||
|
|
||||||
|
<!--[[[end]]]
|
||||||
|
-->
|
||||||
|
|
||||||
|
#### Change the password of a user
|
||||||
|
|
||||||
|
<!--
|
||||||
|
.. [[[cog
|
||||||
|
help(["server", "user", "change-password", "--help"])
|
||||||
|
.. ]]] -->
|
||||||
|
|
||||||
|
```man
|
||||||
|
Usage: argos server user change-password [OPTIONS]
|
||||||
|
|
||||||
|
Change user’s password
|
||||||
|
|
||||||
|
Options:
|
||||||
|
--config TEXT Path of the configuration file. If ARGOS_YAML_FILE
|
||||||
|
environment variable is set, its value will be used instead.
|
||||||
|
--name TEXT Name of the user you want to change the password.
|
||||||
|
--password TEXT
|
||||||
|
--help Show this message and exit.
|
||||||
|
```
|
||||||
|
|
||||||
|
<!--[[[end]]]
|
||||||
|
-->
|
||||||
|
|
||||||
|
#### Delete a user
|
||||||
|
|
||||||
|
<!--
|
||||||
|
.. [[[cog
|
||||||
|
help(["server", "user", "delete", "--help"])
|
||||||
|
.. ]]] -->
|
||||||
|
|
||||||
|
```man
|
||||||
|
Usage: argos server user delete [OPTIONS]
|
||||||
|
|
||||||
|
Delete user
|
||||||
|
|
||||||
|
Options:
|
||||||
|
--config TEXT Path of the configuration file. If ARGOS_YAML_FILE environment
|
||||||
|
variable is set, its value will be used instead.
|
||||||
|
--name TEXT Name of the user to delete. [required]
|
||||||
|
--help Show this message and exit.
|
||||||
|
```
|
||||||
|
|
||||||
|
<!--[[[end]]]
|
||||||
|
-->
|
||||||
|
|
||||||
|
#### Disable a user
|
||||||
|
|
||||||
|
Disabling a user prevents the user to login and access Argos’ web interface but its credentials are still stored in Argos’ database.
|
||||||
|
|
||||||
|
<!--
|
||||||
|
.. [[[cog
|
||||||
|
help(["server", "user", "disable", "--help"])
|
||||||
|
.. ]]] -->
|
||||||
|
|
||||||
|
```man
|
||||||
|
Usage: argos server user disable [OPTIONS]
|
||||||
|
|
||||||
|
Disable user
|
||||||
|
|
||||||
|
Options:
|
||||||
|
--config TEXT Path of the configuration file. If ARGOS_YAML_FILE environment
|
||||||
|
variable is set, its value will be used instead.
|
||||||
|
--name TEXT Name of the user to disable. [required]
|
||||||
|
--help Show this message and exit.
|
||||||
|
```
|
||||||
|
|
||||||
|
<!--[[[end]]]
|
||||||
|
-->
|
||||||
|
|
||||||
|
#### Enable a user
|
||||||
|
|
||||||
|
Enabling a user prevents the user to login and access Argos’ web interface.
|
||||||
|
|
||||||
|
Obviously, the user needs to exists and to be disabled before using the command.
|
||||||
|
|
||||||
|
<!--
|
||||||
|
.. [[[cog
|
||||||
|
help(["server", "user", "enable", "--help"])
|
||||||
|
.. ]]] -->
|
||||||
|
|
||||||
|
```man
|
||||||
|
Usage: argos server user enable [OPTIONS]
|
||||||
|
|
||||||
|
Enable user
|
||||||
|
|
||||||
|
Options:
|
||||||
|
--config TEXT Path of the configuration file. If ARGOS_YAML_FILE environment
|
||||||
|
variable is set, its value will be used instead.
|
||||||
|
--name TEXT Name of the user to reenable [required]
|
||||||
|
--help Show this message and exit.
|
||||||
|
```
|
||||||
|
|
||||||
|
<!--[[[end]]]
|
||||||
|
-->
|
||||||
|
|
||||||
|
#### List all users
|
||||||
|
|
||||||
|
Show all accounts, with their status (enabled or disabled).
|
||||||
|
|
||||||
|
<!--
|
||||||
|
.. [[[cog
|
||||||
|
help(["server", "user", "show", "--help"])
|
||||||
|
.. ]]] -->
|
||||||
|
|
||||||
|
```man
|
||||||
|
Usage: argos server user show [OPTIONS]
|
||||||
|
|
||||||
|
List all users
|
||||||
|
|
||||||
|
Options:
|
||||||
|
--config TEXT Path of the configuration file. If ARGOS_YAML_FILE environment
|
||||||
|
variable is set, its value will be used instead.
|
||||||
|
--help Show this message and exit.
|
||||||
|
```
|
||||||
|
|
||||||
|
<!--[[[end]]]
|
||||||
|
-->
|
||||||
|
|
||||||
|
#### Test the password of a user
|
||||||
|
|
||||||
|
You can verify that you have the right password for a user with the following command:
|
||||||
|
|
||||||
|
<!--
|
||||||
|
.. [[[cog
|
||||||
|
help(["server", "user", "verify-password", "--help"])
|
||||||
|
.. ]]] -->
|
||||||
|
|
||||||
|
```man
|
||||||
|
Usage: argos server user verify-password [OPTIONS]
|
||||||
|
|
||||||
|
Test user’s password
|
||||||
|
|
||||||
|
Options:
|
||||||
|
--config TEXT Path of the configuration file. If ARGOS_YAML_FILE
|
||||||
|
environment variable is set, its value will be used instead.
|
||||||
|
--name TEXT Name of the user you want to test the password for.
|
||||||
|
[required]
|
||||||
|
--password TEXT
|
||||||
|
--help Show this message and exit.
|
||||||
|
```
|
||||||
|
|
||||||
|
<!--[[[end]]]
|
||||||
|
-->
|
||||||
|
|
||||||
|
### Use as a nagios probe
|
||||||
|
|
||||||
|
You can directly use Argos to get an output and an exit code usable with Nagios.
|
||||||
|
|
||||||
|
<!--
|
||||||
|
.. [[[cog
|
||||||
|
help(["server", "nagios", "--help"])
|
||||||
|
.. ]]] -->
|
||||||
|
|
||||||
|
```man
|
||||||
|
Usage: argos server nagios [OPTIONS]
|
||||||
|
|
||||||
|
Output a report of current severities suitable for Nagios with a Nagios
|
||||||
|
compatible exit code
|
||||||
|
|
||||||
|
Options:
|
||||||
|
--config TEXT Path of the configuration file. If ARGOS_YAML_FILE environment
|
||||||
|
variable is set, its value will be used instead.
|
||||||
|
--help Show this message and exit.
|
||||||
|
```
|
||||||
|
|
||||||
|
<!--[[[end]]]
|
||||||
|
-->
|
||||||
|
|
||||||
|
### Test the email settings
|
||||||
|
|
||||||
|
You can verify that your mail settings are ok by sending a test email.
|
||||||
|
|
||||||
|
<!--
|
||||||
|
.. [[[cog
|
||||||
|
help(["server", "test-mail", "--help"])
|
||||||
|
.. ]]] -->
|
||||||
|
|
||||||
|
```man
|
||||||
|
Usage: argos server test-mail [OPTIONS]
|
||||||
|
|
||||||
|
Send a test email
|
||||||
|
|
||||||
|
Options:
|
||||||
|
--config TEXT Path of the configuration file. If ARGOS_YAML_FILE
|
||||||
|
environment variable is set, its value will be used instead.
|
||||||
|
--domain TEXT Domain for the notification
|
||||||
|
--severity TEXT Severity
|
||||||
|
--help Show this message and exit.
|
||||||
|
```
|
||||||
|
|
||||||
|
<!--[[[end]]]
|
||||||
|
-->
|
||||||
|
|
||||||
|
### Test the Gotify settings
|
||||||
|
|
||||||
|
You can verify that your Gotify settings are ok by sending a test notification.
|
||||||
|
|
||||||
|
<!--
|
||||||
|
.. [[[cog
|
||||||
|
help(["server", "test-gotify", "--help"])
|
||||||
|
.. ]]] -->
|
||||||
|
|
||||||
|
```man
|
||||||
|
Usage: argos server test-gotify [OPTIONS]
|
||||||
|
|
||||||
|
Send a test gotify notification
|
||||||
|
|
||||||
|
Options:
|
||||||
|
--config TEXT Path of the configuration file. If ARGOS_YAML_FILE
|
||||||
|
environment variable is set, its value will be used instead.
|
||||||
|
--domain TEXT Domain for the notification
|
||||||
|
--severity TEXT Severity
|
||||||
|
--help Show this message and exit.
|
||||||
|
```
|
||||||
|
|
||||||
|
<!--[[[end]]]
|
||||||
|
-->
|
||||||
|
|
||||||
|
### Test the Apprise settings
|
||||||
|
|
||||||
|
You can verify that your Apprise settings are ok by sending a test notification.
|
||||||
|
|
||||||
|
<!--
|
||||||
|
.. [[[cog
|
||||||
|
help(["server", "test-apprise", "--help"])
|
||||||
|
.. ]]] -->
|
||||||
|
|
||||||
|
```man
|
||||||
|
Usage: argos server test-apprise [OPTIONS]
|
||||||
|
|
||||||
|
Send a test apprise notification
|
||||||
|
|
||||||
|
Options:
|
||||||
|
--config TEXT Path of the configuration file. If ARGOS_YAML_FILE
|
||||||
|
environment variable is set, its value will be used
|
||||||
|
instead.
|
||||||
|
--domain TEXT Domain for the notification
|
||||||
|
--severity TEXT Severity
|
||||||
|
--apprise-group TEXT Apprise group for the notification [required]
|
||||||
|
--help Show this message and exit.
|
||||||
|
```
|
||||||
|
|
||||||
|
<!--[[[end]]]
|
||||||
|
-->
|
||||||
|
|
16
docs/conf.py
16
docs/conf.py
|
@ -5,9 +5,12 @@
|
||||||
|
|
||||||
# -- Project information -----------------------------------------------------
|
# -- Project information -----------------------------------------------------
|
||||||
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
|
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
|
||||||
|
# pylint: disable-msg=invalid-name,redefined-builtin
|
||||||
|
from os import environ
|
||||||
|
|
||||||
import argos
|
import argos
|
||||||
|
|
||||||
project = "Argos"
|
project = "Argos monitoring"
|
||||||
copyright = "2023, Alexis Métaireau, Framasoft"
|
copyright = "2023, Alexis Métaireau, Framasoft"
|
||||||
author = "Alexis Métaireau, Framasoft"
|
author = "Alexis Métaireau, Framasoft"
|
||||||
release = argos.VERSION
|
release = argos.VERSION
|
||||||
|
@ -32,6 +35,15 @@ html_sidebars = {
|
||||||
# -- Options for HTML output -------------------------------------------------
|
# -- Options for HTML output -------------------------------------------------
|
||||||
# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
|
# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
|
||||||
|
|
||||||
|
smartquotes = False
|
||||||
|
|
||||||
|
if "CI_JOB_ID" in environ:
|
||||||
|
html_baseurl = "https://argos-monitoring.framasoft.org"
|
||||||
|
|
||||||
html_theme = "shibuya"
|
html_theme = "shibuya"
|
||||||
html_static_path = ["_static"]
|
html_static_path = ["_static"]
|
||||||
html_css_files = ["fonts.css"]
|
html_css_files = ["fonts.css", "fix-nav.css"]
|
||||||
|
html_logo = "_static/logo.png"
|
||||||
|
html_theme_options = {
|
||||||
|
"og_image_url": "https://argos-monitoring.framasoft.org/_static/logo.png"
|
||||||
|
}
|
||||||
|
|
|
@ -1,56 +1,16 @@
|
||||||
|
---
|
||||||
|
description: How to configure Argos.
|
||||||
|
---
|
||||||
# Configuration
|
# Configuration
|
||||||
|
|
||||||
There are actually two configuration files: one for the service and one for the checks.
|
Argos uses a simple YAML configuration file to define the server’s configuration, the websites to monitor and the checks to run on these websites.
|
||||||
|
|
||||||
## Server configuration
|
See [here](checks.md) for more informations about the checks you can use.
|
||||||
|
|
||||||
The server configuration is done using environment variables. You can put them in a `.env` file at the root of the project.
|
|
||||||
Here is a list of the useful variables, in the `.env` format:
|
|
||||||
|
|
||||||
```{literalinclude} ../conf/.env.example
|
|
||||||
---
|
|
||||||
caption: .env
|
|
||||||
---
|
|
||||||
```
|
|
||||||
|
|
||||||
### Environment variables
|
|
||||||
|
|
||||||
Here are the environment variables you can define to configure how the service will behave :
|
|
||||||
|
|
||||||
#### ARGOS_YAML_FILE
|
|
||||||
|
|
||||||
The path to the yaml configuration file, defining the checks.
|
|
||||||
|
|
||||||
#### ARGOS_DATABASE_URL
|
|
||||||
|
|
||||||
The database url, as defined [in SQLAlchemy docs](https://docs.sqlalchemy.org/en/20/core/engines.html#database-urls).
|
|
||||||
|
|
||||||
For instance, to connect to a postgres database on localhost with user, pass and dbname "argos":
|
|
||||||
|
|
||||||
```
|
|
||||||
ARGOS_DATABASE_URL = "postgresql://argos:argos@localhost/argos"
|
|
||||||
```
|
|
||||||
|
|
||||||
#### DB_POOL_SIZE
|
|
||||||
#### DB_MAX_OVERFLOW
|
|
||||||
|
|
||||||
You configure the size of the database pool of connection, and the max overflow (until when new connections are accepted ?) These are documented [in the SQLAlchemy docs in greater details](https://docs.sqlalchemy.org/en/20/core/pooling.html#sqlalchemy.pool.QueuePool.params.pool_size)
|
|
||||||
|
|
||||||
```bash
|
|
||||||
DB_POOL_SIZE = 10
|
|
||||||
DB_MAX_OVERFLOW = 20
|
|
||||||
```
|
|
||||||
|
|
||||||
## Argos "checks" configuration
|
|
||||||
|
|
||||||
Argos uses a YAML configuration file to define the websites to monitor and the checks to run on these websites.
|
|
||||||
|
|
||||||
Here is a simple configuration file:
|
|
||||||
|
|
||||||
|
Here is a simple self-documented configuration file, which you can get with [`argos server generate-config`](cli.md#server-generate-config):
|
||||||
|
|
||||||
```{literalinclude} ../conf/config-example.yaml
|
```{literalinclude} ../conf/config-example.yaml
|
||||||
---
|
---
|
||||||
caption: config.yaml
|
caption: argos-config.yaml
|
||||||
---
|
---
|
||||||
|
```
|
||||||
```
|
|
||||||
|
|
|
@ -1,15 +1,19 @@
|
||||||
|
---
|
||||||
|
description: How to configure Nginx to use with Argos.
|
||||||
|
---
|
||||||
# Using Nginx as reverse proxy
|
# Using Nginx as reverse proxy
|
||||||
|
|
||||||
As Argos has no authentication mechanism for the front-end, you need to protect some routes with HTTP authentication.
|
Here is a example for Nginx configuration:
|
||||||
|
|
||||||
To do so on Debian, install `apache2-utils` then create a file containing the wanted credentials:
|
|
||||||
```bash
|
|
||||||
htpasswd -c /etc/nginx/argos.passwd argos_admin
|
|
||||||
```
|
|
||||||
|
|
||||||
You can then use this file to protect the front-end’s routes:
|
|
||||||
```{literalinclude} ../../conf/nginx.conf
|
```{literalinclude} ../../conf/nginx.conf
|
||||||
---
|
---
|
||||||
caption: /etc/nginx/sites-available/argos.example.org
|
caption: /etc/nginx/sites-available/argos.example.org
|
||||||
---
|
---
|
||||||
```
|
```
|
||||||
|
|
||||||
|
If you want to use Argos under a subdirectory of your web server, you’ll need to set the `root_path` setting in Argos’s [configuration](../configuration.md) and set Nginx like this:
|
||||||
|
|
||||||
|
```{literalinclude} ../../conf/nginx-subdirectory.conf
|
||||||
|
---
|
||||||
|
caption: Nginx’s location for Argos in a subdirectory
|
||||||
|
---
|
||||||
|
```
|
||||||
|
|
|
@ -1,9 +1,18 @@
|
||||||
|
---
|
||||||
|
description: Here are the systemd files that can be used to deploy the server and the agents.
|
||||||
|
---
|
||||||
# Using systemd
|
# Using systemd
|
||||||
|
|
||||||
Here are the systemd files that can be used to deploy the server and the agents.
|
Here are the systemd files that can be used to deploy the server and the agents.
|
||||||
|
|
||||||
## Agent
|
## Agent
|
||||||
|
|
||||||
|
```{literalinclude} ../../conf/default-argos-agent
|
||||||
|
---
|
||||||
|
caption: /etc/default/argos-agent
|
||||||
|
---
|
||||||
|
```
|
||||||
|
|
||||||
```{literalinclude} ../../conf/systemd-agent.service
|
```{literalinclude} ../../conf/systemd-agent.service
|
||||||
---
|
---
|
||||||
caption: /etc/systemd/system/argos-agent.service
|
caption: /etc/systemd/system/argos-agent.service
|
||||||
|
@ -12,6 +21,12 @@ caption: /etc/systemd/system/argos-agent.service
|
||||||
|
|
||||||
## Server
|
## Server
|
||||||
|
|
||||||
|
```{literalinclude} ../../conf/default-argos-server
|
||||||
|
---
|
||||||
|
caption: /etc/default/argos-server
|
||||||
|
---
|
||||||
|
```
|
||||||
|
|
||||||
```{literalinclude} ../../conf/systemd-server.service
|
```{literalinclude} ../../conf/systemd-server.service
|
||||||
---
|
---
|
||||||
caption: /etc/systemd/system/argos-server.service
|
caption: /etc/systemd/system/argos-server.service
|
||||||
|
|
|
@ -1,3 +1,6 @@
|
||||||
|
---
|
||||||
|
description: Many thanks to their developers!
|
||||||
|
---
|
||||||
# Main dependencies used by Argos
|
# Main dependencies used by Argos
|
||||||
|
|
||||||
## Python packages
|
## Python packages
|
||||||
|
@ -11,7 +14,9 @@
|
||||||
- [Alembic](https://alembic.sqlalchemy.org) is used for DB migrations;
|
- [Alembic](https://alembic.sqlalchemy.org) is used for DB migrations;
|
||||||
- [Tenacity](https://github.com/jd/tenacity) a small utility to retry a function in case an error occurred;
|
- [Tenacity](https://github.com/jd/tenacity) a small utility to retry a function in case an error occurred;
|
||||||
- [Uvicorn](https://www.uvicorn.org/) is the tool used to run our server;
|
- [Uvicorn](https://www.uvicorn.org/) is the tool used to run our server;
|
||||||
- [Gunicorn](https://gunicorn.org/) is the recommended WSGI HTTP server for production.
|
- [Gunicorn](https://gunicorn.org/) is the recommended WSGI HTTP server for production;
|
||||||
|
- [Apprise](https://github.com/caronc/apprise/wiki) allows Argos to send notifications through a lot of channels;
|
||||||
|
- [FastAPI Utilities](https://fastapiutils.github.io/fastapi-utils/) is in charge of recurring tasks.
|
||||||
|
|
||||||
## CSS framework
|
## CSS framework
|
||||||
|
|
||||||
|
|
|
@ -1,3 +1,6 @@
|
||||||
|
---
|
||||||
|
description: All you need to know to develop on Argos.
|
||||||
|
---
|
||||||
# Installing for development
|
# Installing for development
|
||||||
|
|
||||||
To install all what you need to develop on Argos, do:
|
To install all what you need to develop on Argos, do:
|
||||||
|
|
8
docs/developer/license.md
Normal file
8
docs/developer/license.md
Normal file
|
@ -0,0 +1,8 @@
|
||||||
|
---
|
||||||
|
description: Argos is licensed under the terms of the GNU AFFERO GPLv3.
|
||||||
|
---
|
||||||
|
# License
|
||||||
|
|
||||||
|
Argos is licensed under the terms of the GNU AFFERO GPLv3.
|
||||||
|
|
||||||
|
See [LICENSE file](https://framagit.org/framasoft/framaspace/argos/-/blob/main/LICENSE) on the repository.
|
|
@ -1,3 +1,6 @@
|
||||||
|
---
|
||||||
|
description: How to use Alembic to add a database migration to Argos.
|
||||||
|
---
|
||||||
# Adding a database migration
|
# Adding a database migration
|
||||||
|
|
||||||
We are using [Alembic](https://alembic.sqlalchemy.org) to handle the database
|
We are using [Alembic](https://alembic.sqlalchemy.org) to handle the database
|
||||||
|
@ -7,7 +10,12 @@ First, do your changes in the code, change the model, add new tables, etc. Once
|
||||||
you're done, you can create a new migration.
|
you're done, you can create a new migration.
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
venv/bin/alembic -c argos/server/migrations/alembic.ini revision --autogenerate -m "migration reason"
|
venv/bin/alembic -c argos/server/migrations/alembic.ini revision \
|
||||||
|
--autogenerate -m "migration reason"
|
||||||
```
|
```
|
||||||
|
|
||||||
Edit the created file to remove comments and adapt it to make sure the migration is complete (Alembic is not powerful enough to cover all the corner cases).
|
Edit the created file to remove comments and adapt it to make sure the migration is complete (Alembic is not powerful enough to cover all the corner cases).
|
||||||
|
|
||||||
|
In case you want to add an `Enum` type and use it in an existing table, please have a look at [`argos/server/migrations/versions/dcf73fa19fce_specify_check_method.py`](https://framagit.org/framasoft/framaspace/argos/-/blob/main/argos/server/migrations/versions/dcf73fa19fce_specify_check_method.py).
|
||||||
|
|
||||||
|
If you want to add an `Enum` type in a new table, you can do like in [`argos/server/migrations/versions/7d480e6f1112_initial_migrations.py`](https://framagit.org/framasoft/framaspace/argos/-/blob/main/argos/server/migrations/versions/7d480e6f1112_initial_migrations.py)
|
||||||
|
|
|
@ -1,3 +1,6 @@
|
||||||
|
---
|
||||||
|
description: What’s in the database?
|
||||||
|
---
|
||||||
# The data model
|
# The data model
|
||||||
|
|
||||||
```{mermaid}
|
```{mermaid}
|
||||||
|
@ -25,6 +28,19 @@ class Result{
|
||||||
- severity
|
- severity
|
||||||
- context
|
- context
|
||||||
}
|
}
|
||||||
|
class ConfigCache {
|
||||||
|
- name
|
||||||
|
- val
|
||||||
|
- updated_at
|
||||||
|
}
|
||||||
|
class User {
|
||||||
|
- username
|
||||||
|
- password
|
||||||
|
- disabled
|
||||||
|
- created_at
|
||||||
|
- updated_at
|
||||||
|
- last_login_at
|
||||||
|
}
|
||||||
Result "*" o-- "1" Task : has many
|
Result "*" o-- "1" Task : has many
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
|
@ -1,3 +1,6 @@
|
||||||
|
---
|
||||||
|
description: Don’t worry, creating a new check is quite easy.
|
||||||
|
---
|
||||||
# Implementing a new check
|
# Implementing a new check
|
||||||
|
|
||||||
## Creating a new check class
|
## Creating a new check class
|
||||||
|
@ -37,4 +40,8 @@ If that's your case, you can implement the `finalize` method, and return some ex
|
||||||
async def finalize(cls, config, result, extra_arg):
|
async def finalize(cls, config, result, extra_arg):
|
||||||
# You can use the extra_arg here to determine the severity
|
# You can use the extra_arg here to determine the severity
|
||||||
return Status.SUCCESS, Severity.OK
|
return Status.SUCCESS, Severity.OK
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Document the new check
|
||||||
|
|
||||||
|
Please, document the use of the new check in `docs/checks.md` and `argos/config-example.yaml`.
|
||||||
|
|
37
docs/developer/new-notification-way.md
Normal file
37
docs/developer/new-notification-way.md
Normal file
|
@ -0,0 +1,37 @@
|
||||||
|
---
|
||||||
|
description: Adding a new notification way is quite simple.
|
||||||
|
---
|
||||||
|
# Add a notification way
|
||||||
|
|
||||||
|
Adding a new notification way is quite simple.
|
||||||
|
|
||||||
|
First, you need to think about how you will configure it.
|
||||||
|
For example, here’s how gotify notifications are configured:
|
||||||
|
```yaml
|
||||||
|
gotify:
|
||||||
|
- url: https://example.org
|
||||||
|
tokens:
|
||||||
|
- foo
|
||||||
|
- bar
|
||||||
|
```
|
||||||
|
|
||||||
|
Feel free to open an issue to discuss about your notification way or its configuration before coding!
|
||||||
|
See [#50](https://framagit.org/framasoft/framaspace/argos/-/issues/50) for example.
|
||||||
|
|
||||||
|
Then, you’ll need to add the pydantic schema matching your config in [`argos/schemas/config.py`](https://framagit.org/framasoft/framaspace/argos/-/blob/main/argos/schemas/config.py).
|
||||||
|
|
||||||
|
For gotify, it’s:
|
||||||
|
```python
|
||||||
|
class GotifyUrl(BaseModel):
|
||||||
|
url: HttpUrl
|
||||||
|
tokens: List[str]
|
||||||
|
```
|
||||||
|
|
||||||
|
Add the schema to the `General` schema in the same file (don’t forget to make it optional).
|
||||||
|
|
||||||
|
For gotify, we added this:
|
||||||
|
```python
|
||||||
|
gotify: Optional[List[GotifyUrl]] = None
|
||||||
|
```
|
||||||
|
|
||||||
|
Finally, write a function which use your new notification way in [`argos/server/alerting.py`](https://framagit.org/framasoft/framaspace/argos/-/blob/main/argos/server/alerting.py) and use it in the `handle_alert` function of the same file.
|
|
@ -1,3 +1,6 @@
|
||||||
|
---
|
||||||
|
description: An agent and a server, that’s all.
|
||||||
|
---
|
||||||
# Technical overview
|
# Technical overview
|
||||||
|
|
||||||
Argos uses an agent and server architecture. The server is responsible for storing the configuration and the results of the checks. The agent is responsible for running the checks and sending the results to the server.
|
Argos uses an agent and server architecture. The server is responsible for storing the configuration and the results of the checks. The agent is responsible for running the checks and sending the results to the server.
|
||||||
|
|
|
@ -1,3 +1,6 @@
|
||||||
|
---
|
||||||
|
description: Once in a while, we release this package. Here is how.
|
||||||
|
---
|
||||||
# Releasing guide
|
# Releasing guide
|
||||||
|
|
||||||
Once in a while, we release this package. Here is how.
|
Once in a while, we release this package. Here is how.
|
||||||
|
@ -17,15 +20,29 @@ You'll need to get an account on [PyPI](https://pypi.org), where the packages wi
|
||||||
Here is the quick version. If you need more details, some parts are explained in more detail in the next sections.
|
Here is the quick version. If you need more details, some parts are explained in more detail in the next sections.
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
|
# Be sure you are on the good branch
|
||||||
|
git checkout main
|
||||||
|
|
||||||
# Ensure the tests run correctly
|
# Ensure the tests run correctly
|
||||||
make test
|
make test
|
||||||
|
|
||||||
|
# Check static typing
|
||||||
|
make mypy
|
||||||
|
|
||||||
# Bump the version, according to semantic versioning
|
# Bump the version, according to semantic versioning
|
||||||
hatch version minor # or `hatch version major`
|
hatch version minor # or `hatch version major`, or `hatch version fix`
|
||||||
|
|
||||||
|
# Update the changelog
|
||||||
|
sed -e "s/## .Unreleased./&\n\n## $(hatch version)\n\nDate: $(date +%F)/" \
|
||||||
|
-i CHANGELOG.md
|
||||||
|
|
||||||
|
# Commit the change
|
||||||
|
git add argos/__init__.py CHANGELOG.md
|
||||||
|
git commit -m "🏷 — Bump version ($(hatch version))"
|
||||||
|
|
||||||
# Create a tag on the git repository and push it
|
# Create a tag on the git repository and push it
|
||||||
git tag "$(hatch version)" && git push
|
git tag "$(hatch version)" -m "$(hatch version)" &&
|
||||||
|
git push --follow-tags
|
||||||
|
|
||||||
# Build the project
|
# Build the project
|
||||||
hatch build --clean
|
hatch build --clean
|
||||||
|
@ -74,7 +91,7 @@ If you're still experimenting, you can use the [Test PyPI](https://test.pypi.org
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Publishing on test PyPI
|
# Publishing on test PyPI
|
||||||
hatch build -r test
|
hatch publish -r test
|
||||||
|
|
||||||
# Installing from test PyPI
|
# Installing from test PyPI
|
||||||
pip install --index-url https://test.pypi.org/simple/ argos-monitoring
|
pip install --index-url https://test.pypi.org/simple/ argos-monitoring
|
||||||
|
|
|
@ -1,3 +1,6 @@
|
||||||
|
---
|
||||||
|
description: Depending on your setup, you might need different tools to develop on argos.
|
||||||
|
---
|
||||||
# Requirements
|
# Requirements
|
||||||
|
|
||||||
Depending on your setup, you might need different tools to develop on argos. We try to list them here.
|
Depending on your setup, you might need different tools to develop on argos. We try to list them here.
|
||||||
|
@ -14,4 +17,4 @@ brew install gnu-sed
|
||||||
|
|
||||||
# This will explain how to add it to your path (to replace the default one)
|
# This will explain how to add it to your path (to replace the default one)
|
||||||
brew info gnu-sed
|
brew info gnu-sed
|
||||||
```
|
```
|
||||||
|
|
|
@ -1,3 +1,6 @@
|
||||||
|
---
|
||||||
|
description: Launch tests! Make linting tools happy!
|
||||||
|
---
|
||||||
# Tests and linting
|
# Tests and linting
|
||||||
|
|
||||||
## Tests
|
## Tests
|
||||||
|
@ -19,3 +22,8 @@ You can launch all of them with:
|
||||||
```bash
|
```bash
|
||||||
make lint
|
make lint
|
||||||
```
|
```
|
||||||
|
|
||||||
|
To let `ruff` format the code, run:
|
||||||
|
```bash
|
||||||
|
make ruff-format
|
||||||
|
```
|
||||||
|
|
37
docs/faq.md
Normal file
37
docs/faq.md
Normal file
|
@ -0,0 +1,37 @@
|
||||||
|
---
|
||||||
|
description: Soooo much questions…
|
||||||
|
---
|
||||||
|
# FAQ
|
||||||
|
|
||||||
|
## How is it different than Nagios?
|
||||||
|
|
||||||
|
In a few words, Argos does fewer things than Nagios, but that makes it simpler.
|
||||||
|
|
||||||
|
Nagios can do a lot more than Argos, as it can monitor the load of a server, its disk occupation and so much more.
|
||||||
|
You can extend the possibilities of Nagios with your own plugins, allowing to monitor almost everything.
|
||||||
|
Argos can only monitor web sites, in various ways (check the HTTP status, check the certificate validity time…).
|
||||||
|
|
||||||
|
On the other hand, configuration and deployment of Argos are very much simpler than Nagios’.
|
||||||
|
|
||||||
|
## How is it different than statping-ng or Uptime Kuma?
|
||||||
|
|
||||||
|
In one word: scalability.
|
||||||
|
|
||||||
|
While [statping-ng](https://statping-ng.github.io/) and [Uptime Kuma](https://uptime.kuma.pet/) have a similar goal to Argos, you can’t monitor thousands of web sites with them efficiently, as their dashboard wants to present you the results of all of your web sites at once… and with the history of the results.
|
||||||
|
|
||||||
|
We gave those solutions a try, but fetching thousands of results from the dashboard overloaded the backend.
|
||||||
|
|
||||||
|
## Who created Argos?
|
||||||
|
|
||||||
|
### Framasoft
|
||||||
|
|
||||||
|
Framasoft is a non-profit association founded in 2004, financed by [donations](https://support.framasoft.org/), which is limited to a dozen employees and about thirty volunteers (a group of friends!).
|
||||||
|
You can find more information at <https://framasoft.org/>.
|
||||||
|
|
||||||
|
We needed a very efficient web sites monitoring tool for one of our projects, but didn’t have time to develop it, so we hired [Alexis Métaireau](#alexis-metaireau) for that.
|
||||||
|
|
||||||
|
### Alexis Métaireau
|
||||||
|
|
||||||
|
Alexis is a long-time free software developer, who has worked for Mozilla, created [Pelican](http://getpelican.com/), a static site generator, [I Hate Money](http://ihatemoney.org/), a website for managing group expenses and many more other projects.
|
||||||
|
|
||||||
|
See <https://blog.notmyidea.org/> for more information about him.
|
|
@ -1,3 +1,6 @@
|
||||||
|
---
|
||||||
|
description: A monitoring and status board for websites. Test how your websites respond to external checks, get notified when something goes wrong.
|
||||||
|
---
|
||||||
# Argos monitoring
|
# Argos monitoring
|
||||||
|
|
||||||
A monitoring and status board for websites.
|
A monitoring and status board for websites.
|
||||||
|
@ -37,6 +40,8 @@ installation/postgresql
|
||||||
cli
|
cli
|
||||||
api
|
api
|
||||||
changelog
|
changelog
|
||||||
|
faq
|
||||||
|
installation/tl-dr
|
||||||
```
|
```
|
||||||
|
|
||||||
```{toctree}
|
```{toctree}
|
||||||
|
@ -61,9 +66,11 @@ developer/installation
|
||||||
developer/overview
|
developer/overview
|
||||||
developer/dependencies
|
developer/dependencies
|
||||||
developer/new-check
|
developer/new-check
|
||||||
|
developer/new-notification-way
|
||||||
developer/models
|
developer/models
|
||||||
developer/migrations
|
developer/migrations
|
||||||
developer/tests
|
developer/tests
|
||||||
developer/release
|
developer/release
|
||||||
|
developer/license
|
||||||
```
|
```
|
||||||
|
|
||||||
|
|
|
@ -1,10 +1,38 @@
|
||||||
|
---
|
||||||
|
description: Install Argos, with all the explanations you want.
|
||||||
|
---
|
||||||
# Installation
|
# Installation
|
||||||
|
|
||||||
|
NB: if you want a quick-installation guide, we [got you covered](tl-dr.md).
|
||||||
|
|
||||||
## Requirements
|
## Requirements
|
||||||
|
|
||||||
- Python 3.11+
|
- Python 3.11+
|
||||||
- PostgreSQL 13+ (for production)
|
- PostgreSQL 13+ (for production)
|
||||||
|
|
||||||
|
### Optional dependencies
|
||||||
|
|
||||||
|
If you want to use LDAP authentication, you will need to install some packages (here for a Debian-based system):
|
||||||
|
|
||||||
|
```bash
|
||||||
|
apt-get install build-essential python3-dev libldap-dev libsasl2-dev
|
||||||
|
```
|
||||||
|
|
||||||
|
## Recommendation
|
||||||
|
|
||||||
|
Create a dedicated user for argos:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
adduser --home /opt/argos --disabled-login --disabled-password --system argos
|
||||||
|
```
|
||||||
|
|
||||||
|
Do all the manipulations below in `/opt/argos/`, with the user `argos`.
|
||||||
|
Either use `sudo` or login as `argos` with the following command:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
su argos -s /bin/bash
|
||||||
|
```
|
||||||
|
|
||||||
## Install with pip
|
## Install with pip
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
|
@ -12,12 +40,31 @@ pip install argos-monitoring
|
||||||
```
|
```
|
||||||
|
|
||||||
You may want to install Argos in a virtualenv:
|
You may want to install Argos in a virtualenv:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
python3 -m venv venv
|
python3 -m venv venv
|
||||||
source venv/bin/activate
|
source venv/bin/activate
|
||||||
pip install argos-monitoring
|
pip install argos-monitoring
|
||||||
```
|
```
|
||||||
|
|
||||||
|
For production, we recommend the use of [Gunicorn](https://gunicorn.org/), which you can install at the same time as Argos:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pip install "argos-monitoring[gunicorn]"
|
||||||
|
```
|
||||||
|
|
||||||
|
If you want to use LDAP authentication, you’ll need to install Argos this way:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pip install "argos-monitoring[ldap]"
|
||||||
|
```
|
||||||
|
|
||||||
|
And for an installation with Gunicorn and LDAP authentication:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pip install "argos-monitoring[gunicorn,ldap]"
|
||||||
|
```
|
||||||
|
|
||||||
## Install from sources
|
## Install from sources
|
||||||
|
|
||||||
Once you got the source locally, create a virtualenv and install the dependencies:
|
Once you got the source locally, create a virtualenv and install the dependencies:
|
||||||
|
@ -28,27 +75,33 @@ source venv/bin/activate
|
||||||
pip install -e .
|
pip install -e .
|
||||||
```
|
```
|
||||||
|
|
||||||
|
To install gunicorn, use `pip install -e ".[gunicorn]"` instead of `pip install -e .`
|
||||||
|
|
||||||
## Configure
|
## Configure
|
||||||
|
|
||||||
The quickest way to get started is to get the `config-example.yaml` file from our repository and edit it:
|
The quickest way to get started is to generate the configuration file from argos and edit it:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
wget https://framagit.org/framasoft/framaspace/argos/-/raw/main/conf/config-example.yaml -O config.yaml
|
argos server generate-config > argos-config.yaml
|
||||||
```
|
```
|
||||||
|
|
||||||
You can read more about the configuration in the [configuration section](../configuration.md).
|
You can read more about the configuration in the [configuration section](../configuration.md).
|
||||||
|
|
||||||
### Configure the server
|
For production, we suggest putting your config in `/etc/argos/config.yaml` and restricting the file’s permissions.
|
||||||
|
As root:
|
||||||
Environment variables are used to configure the server. You can also put them in an `.env` file:
|
```bash
|
||||||
|
mkdir /etc/argos
|
||||||
```{literalinclude} ../../conf/.env.example
|
chown argos: /etc/argos
|
||||||
---
|
chmod 700 /etc/argos
|
||||||
caption: .env
|
|
||||||
---
|
|
||||||
```
|
```
|
||||||
|
|
||||||
Please note that the only supported database engines are SQLite for development and PostgreSQL for production.
|
Then, as `argos`:
|
||||||
|
```bash
|
||||||
|
argos server generate-config > /etc/argos/config.yaml
|
||||||
|
chmod 600 /etc/argos/config.yaml
|
||||||
|
```
|
||||||
|
|
||||||
|
Please note that the only supported database engines are SQLite for development and [PostgreSQL](postgresql.md) for production.
|
||||||
|
|
||||||
## Apply migrations to database
|
## Apply migrations to database
|
||||||
|
|
||||||
|
@ -60,7 +113,7 @@ argos server migrate
|
||||||
|
|
||||||
## Inject tasks into the database
|
## Inject tasks into the database
|
||||||
|
|
||||||
Argos keeps tasks’ configuration in database, take from the config file.
|
Argos keeps tasks’ configuration in database, taken from the config file.
|
||||||
|
|
||||||
Populate the database with the tasks:
|
Populate the database with the tasks:
|
||||||
|
|
||||||
|
@ -68,17 +121,7 @@ Populate the database with the tasks:
|
||||||
argos server reload-config
|
argos server reload-config
|
||||||
```
|
```
|
||||||
|
|
||||||
## Starting the server
|
## Generating a token
|
||||||
|
|
||||||
Then you can start the server:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
argos server start
|
|
||||||
```
|
|
||||||
|
|
||||||
The server reads the `yaml` file at startup, and populates the tasks queue with the checks defined in the configuration.
|
|
||||||
|
|
||||||
## Generating a token
|
|
||||||
|
|
||||||
The agent needs an authentication token to be able to communicate with the server.
|
The agent needs an authentication token to be able to communicate with the server.
|
||||||
|
|
||||||
|
@ -95,23 +138,67 @@ service:
|
||||||
- "auth-token"
|
- "auth-token"
|
||||||
```
|
```
|
||||||
|
|
||||||
|
## Starting the server
|
||||||
|
|
||||||
|
Then you can start the server:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
argos server start
|
||||||
|
```
|
||||||
|
|
||||||
|
This way to start the server is not suitable for production, use it only for developing or testing.
|
||||||
|
|
||||||
|
## Starting the server for production
|
||||||
|
|
||||||
|
For production, you can use [Gunicorn](https://gunicorn.org/) to start the server.
|
||||||
|
|
||||||
|
To install Gunicorn in the virtualenv, if you didn’t already install Argos that way:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pip install "argos-monitoring[gunicorn]"
|
||||||
|
```
|
||||||
|
|
||||||
|
To start the server:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
gunicorn "argos.server.main:get_application()" -k uvicorn.workers.UvicornWorker
|
||||||
|
```
|
||||||
|
|
||||||
|
There are some Gunicorn options that you should use:
|
||||||
|
- `-w INT, --workers INT`: the number of worker processes for handling requests. Default is `1`.
|
||||||
|
- `-b ADDRESS, --bind ADDRESS`: the socket to bind. Default is `127.0.0.1:8000`.
|
||||||
|
- `--forwarded-allow-ips STRING`: comma-separated list of front-end IPs that are allowed to set secure headers. Default is `127.0.0.1`.
|
||||||
|
|
||||||
|
So, to start the server with 4 workers while listening to `127.0.0.1:8001`:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
gunicorn "argos.server.main:get_application()" -k uvicorn.workers.UvicornWorker -w 4 -b 127.0.0.1:8001
|
||||||
|
```
|
||||||
|
|
||||||
|
Gunicorn has a lot of other options, have a look at `gunicorn --help`.
|
||||||
|
|
||||||
|
Argos uses FastAPI, so you can use other ways to start the server.
|
||||||
|
See <https://fastapi.tiangolo.com/deployment/manually/#asgi-servers> (but Gunicorn is recommended).
|
||||||
|
|
||||||
|
See [here](../deployment/systemd.md#server) for a systemd service example and [here](../deployment/nginx.md) for a nginx configuration example.
|
||||||
|
|
||||||
## Running the agent
|
## Running the agent
|
||||||
|
|
||||||
You can run the agent on the same machine as the server, or on a different machine.
|
You can run the agent on the same machine as the server, or on a different machine.
|
||||||
The only requirement is that the agent can reach the server.
|
The only requirement is that the agent can reach the server through HTTP or HTTPS.
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
argos agent http://localhost:8000 "auth-token"
|
argos agent http://localhost:8000 "auth-token"
|
||||||
```
|
```
|
||||||
|
|
||||||
## Cleaning the database
|
## Watch the agents
|
||||||
|
|
||||||
You also have to run cleaning tasks periodically. `argos server clean --help` will give you more information on how to do that.
|
In order to be sure that agents are up and communicate with the server, you can periodically run the `argos server watch-agents` command.
|
||||||
|
|
||||||
Here is a crontab example, which will clean the db each hour:
|
Here is a crontab example, which will check the agents every 5 minutes:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Run the cleaning tasks every hour (at minute 7)
|
*/5 * * * * argos server watch-agents --time-without-agent 10
|
||||||
# Keeps 10 results per task, and remove tasks’ locks older than 1 hour
|
|
||||||
7 * * * * argos server cleandb --max-results 10 --max-lock-seconds 3600
|
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Check the documentation of the command with `argos server watch-agents --help`
|
||||||
|
|
|
@ -1,3 +1,6 @@
|
||||||
|
---
|
||||||
|
description: Here are a few steps for you to install PostgreSQL on your system.
|
||||||
|
---
|
||||||
# Install and configure PostgreSQL
|
# Install and configure PostgreSQL
|
||||||
|
|
||||||
Here are a few steps for you to install PostgreSQL on your system:
|
Here are a few steps for you to install PostgreSQL on your system:
|
||||||
|
|
160
docs/installation/tl-dr.md
Normal file
160
docs/installation/tl-dr.md
Normal file
|
@ -0,0 +1,160 @@
|
||||||
|
---
|
||||||
|
description: You want to install Argos fast? Ok, here we go.
|
||||||
|
---
|
||||||
|
# TL;DR: fast installation instructions
|
||||||
|
|
||||||
|
You want to install Argos fast? Ok, here we go.
|
||||||
|
|
||||||
|
## For testing
|
||||||
|
|
||||||
|
This is for testing only!
|
||||||
|
|
||||||
|
```bash
|
||||||
|
sudo apt install python3
|
||||||
|
mkdir /tmp/argos
|
||||||
|
cd /tmp/argos
|
||||||
|
python3 -m venv venv
|
||||||
|
source venv/bin/activate
|
||||||
|
pip install argos-monitoring
|
||||||
|
argos server generate-config |
|
||||||
|
sed -e "s@production@dev@" \
|
||||||
|
-e "s@url: .postgresql.*@url: \"sqlite:////tmp/argos.db\"@" > argos-config.yaml
|
||||||
|
argos server migrate
|
||||||
|
ARGOS_TOKEN=$(argos server generate-token)
|
||||||
|
sed -e "s@# - secret_token@- $ARGOS_TOKEN@" -i argos-config.yaml
|
||||||
|
echo "The agent token is $ARGOS_TOKEN"
|
||||||
|
```
|
||||||
|
|
||||||
|
Edit `argos-config.yaml`.
|
||||||
|
Add some real web sites to test.
|
||||||
|
|
||||||
|
Then:
|
||||||
|
|
||||||
|
```
|
||||||
|
argos server reload-config
|
||||||
|
argos server start --host 0.0.0.0 --port 8000
|
||||||
|
```
|
||||||
|
|
||||||
|
In another terminal:
|
||||||
|
|
||||||
|
```
|
||||||
|
cd /tmp/argos
|
||||||
|
source venv/bin/activate
|
||||||
|
argos agent http://127.0.0.1:8000 the_generated_token
|
||||||
|
```
|
||||||
|
|
||||||
|
Then go to `http://127.0.0.1:8000` or `http://the_IP_address_of_your_server:8000`.
|
||||||
|
|
||||||
|
## For production
|
||||||
|
|
||||||
|
```bash
|
||||||
|
apt install python3 postgresql
|
||||||
|
sudo -u postgres createuser -P argos
|
||||||
|
sudo -u postgres createdb -O argos argos
|
||||||
|
sudo -u postgres psql -c "ALTER DATABASE argos SET TIMEZONE TO 'UTC';"
|
||||||
|
adduser --home /opt/argos --disabled-login --disabled-password --system argos
|
||||||
|
|
||||||
|
cd /opt/argos
|
||||||
|
sudo -u argos python3 -m venv venv
|
||||||
|
sudo -u argos bash -c 'source venv/bin/activate && pip install "argos-monitoring[gunicorn]"'
|
||||||
|
|
||||||
|
mkdir /etc/argos
|
||||||
|
/opt/argos/venv/bin/argos server generate-config > /etc/argos/config.yaml
|
||||||
|
|
||||||
|
cat <<EOF > /etc/default/argos-server
|
||||||
|
ARGOS_YAML_FILE="/etc/argos/config.yaml"
|
||||||
|
ARGOS_SERVER_WORKERS=4
|
||||||
|
ARGOS_SERVER_SOCKET=127.0.0.1:8000
|
||||||
|
# Comma separated list of IP addresses of the web proxy (usually Nginx)
|
||||||
|
ARGOS_SERVER_FORWARDED_ALLOW_IPS=127.0.0.1
|
||||||
|
EOF
|
||||||
|
|
||||||
|
cat <<EOF > /etc/default/argos-agent
|
||||||
|
ARGOS_AGENT_TOKEN=Secret
|
||||||
|
ARGOS_AGENT_SERVER_URL=http://127.0.0.1:8000
|
||||||
|
ARGOS_AGENT_LOGLEVEL=WARNING
|
||||||
|
ARGOS_AGENT_MAX_TASKS=20
|
||||||
|
ARGOS_AGENT_WAIT_TIME=10
|
||||||
|
EOF
|
||||||
|
|
||||||
|
cat <<EOF > /etc/systemd/system/argos-server.service
|
||||||
|
[Unit]
|
||||||
|
Description=Argos server
|
||||||
|
Documentation=https://argos-monitoring.framasoft.org/
|
||||||
|
Requires=network.target postgresql.service
|
||||||
|
After=network.target postgresql.service
|
||||||
|
PartOf=postgresql.service
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
User=argos
|
||||||
|
WorkingDirectory=/opt/argos/
|
||||||
|
EnvironmentFile=/etc/default/argos-server
|
||||||
|
ExecStartPre=/opt/argos/venv/bin/argos server migrate
|
||||||
|
ExecStartPre=/opt/argos/venv/bin/argos server reload-config --enqueue
|
||||||
|
ExecStart=/opt/argos/venv/bin/gunicorn "argos.server.main:get_application()" \\
|
||||||
|
--workers \$ARGOS_SERVER_WORKERS \\
|
||||||
|
--worker-class uvicorn.workers.UvicornWorker \\
|
||||||
|
--bind \$ARGOS_SERVER_SOCKET \\
|
||||||
|
--forwarded-allow-ips \$ARGOS_SERVER_FORWARDED_ALLOW_IPS
|
||||||
|
ExecReload=/opt/argos/venv/bin/argos server reload-config --enqueue
|
||||||
|
SyslogIdentifier=argos-server
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=multi-user.target
|
||||||
|
EOF
|
||||||
|
|
||||||
|
cat <<EOF > /etc/systemd/system/argos-agent.service
|
||||||
|
[Unit]
|
||||||
|
Description=Argos agent
|
||||||
|
Documentation=https://argos-monitoring.framasoft.org/
|
||||||
|
Requires=network.target
|
||||||
|
After=network.target
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
User=argos
|
||||||
|
EnvironmentFile=/etc/default/argos-agent
|
||||||
|
WorkingDirectory=/opt/argos/
|
||||||
|
ExecStart=/opt/argos/venv/bin/argos agent --max-tasks \$ARGOS_AGENT_MAX_TASKS \\
|
||||||
|
--wait-time \$ARGOS_AGENT_WAIT_TIME \\
|
||||||
|
--log-level \$ARGOS_AGENT_LOGLEVEL
|
||||||
|
SyslogIdentifier=argos-agent
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=multi-user.target
|
||||||
|
EOF
|
||||||
|
|
||||||
|
chown -R argos: /etc/default/argos-* /etc/argos/
|
||||||
|
chmod 700 /etc/argos
|
||||||
|
chmod 600 /etc/argos/config.yaml
|
||||||
|
|
||||||
|
systemctl daemon-reload
|
||||||
|
```
|
||||||
|
|
||||||
|
Then, edit `/etc/argos/config.yaml` to put your database password in it and change the other settings to suit your needs.
|
||||||
|
|
||||||
|
Create a token for your agent:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
sudo -u argos /opt/argos/venv/bin/argos server generate-token
|
||||||
|
```
|
||||||
|
|
||||||
|
Edit `/etc/default/argos-agent` to put the generated token in it and change the other settings to suit your needs.
|
||||||
|
|
||||||
|
Edit `/etc/argos/config.yaml` to configure Argos (don’t forget to add the generated token in it too).
|
||||||
|
|
||||||
|
Enable and start the server and the agent and make sure they work:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
systemctl enable --now argos-server.service argos-agent.service
|
||||||
|
systemctl status argos-server.service argos-agent.service
|
||||||
|
```
|
||||||
|
|
||||||
|
If all works well, you have to put some cron tasks in `argos` crontab:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cat <<EOF | crontab -u argos -
|
||||||
|
*/10 * * * * /opt/argos/venv/bin/argos server watch-agents --time-without-agent 10
|
||||||
|
EOF
|
||||||
|
```
|
||||||
|
|
||||||
|
See [this page](../deployment/nginx.md) for using Nginx as a reverse proxy.
|
|
@ -22,11 +22,18 @@ classifiers = [
|
||||||
|
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"alembic>=1.13.0,<1.14",
|
"alembic>=1.13.0,<1.14",
|
||||||
|
"apprise>=1.9.0,<2",
|
||||||
|
"bcrypt>=4.1.3,<5",
|
||||||
"click>=8.1,<9",
|
"click>=8.1,<9",
|
||||||
|
"durations-nlp>=1.0.1,<2",
|
||||||
"fastapi>=0.103,<0.104",
|
"fastapi>=0.103,<0.104",
|
||||||
"gunicorn>=21.2,<22",
|
"fastapi-login>=1.10.0,<2",
|
||||||
"httpx>=0.25,<1",
|
"fastapi-utils>=0.8.0,<0.9",
|
||||||
|
"httpx>=0.27.2,<0.28.0",
|
||||||
"Jinja2>=3.0,<4",
|
"Jinja2>=3.0,<4",
|
||||||
|
"jsonpointer>=3.0,<4",
|
||||||
|
"passlib>=1.7.4,<2",
|
||||||
|
"psutil>=5.9.8,<6",
|
||||||
"psycopg2-binary>=2.9,<3",
|
"psycopg2-binary>=2.9,<3",
|
||||||
"pydantic[email]>=2.4,<3",
|
"pydantic[email]>=2.4,<3",
|
||||||
"pydantic-settings>=2.0,<3",
|
"pydantic-settings>=2.0,<3",
|
||||||
|
@ -36,6 +43,7 @@ dependencies = [
|
||||||
"sqlalchemy[asyncio]>=2.0,<3",
|
"sqlalchemy[asyncio]>=2.0,<3",
|
||||||
"sqlalchemy-utils>=0.41,<1",
|
"sqlalchemy-utils>=0.41,<1",
|
||||||
"tenacity>=8.2,<9",
|
"tenacity>=8.2,<9",
|
||||||
|
"typing_inspect>=0.9.0,<1",
|
||||||
"uvicorn>=0.23,<1",
|
"uvicorn>=0.23,<1",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
@ -43,16 +51,18 @@ dependencies = [
|
||||||
dev = [
|
dev = [
|
||||||
"black==23.3.0",
|
"black==23.3.0",
|
||||||
"djlint>=1.34.0",
|
"djlint>=1.34.0",
|
||||||
|
"hatch==1.13.0",
|
||||||
"ipdb>=0.13,<0.14",
|
"ipdb>=0.13,<0.14",
|
||||||
"ipython>=8.16,<9",
|
"ipython>=8.16,<9",
|
||||||
"isort==5.11.5",
|
"isort==5.11.5",
|
||||||
"pylint>=3.0.2",
|
"mypy>=1.10.0,<2",
|
||||||
|
"pylint>=3.2.5",
|
||||||
"pytest-asyncio>=0.21,<1",
|
"pytest-asyncio>=0.21,<1",
|
||||||
"pytest>=6.2.5",
|
"pytest>=6.2.5",
|
||||||
"respx>=0.20,<1",
|
"respx>=0.20,<1",
|
||||||
"ruff==0.1.5,<1",
|
"ruff==0.1.5,<1",
|
||||||
"sphinx-autobuild",
|
"sphinx-autobuild",
|
||||||
"hatch==1.9.4",
|
"types-PyYAML",
|
||||||
]
|
]
|
||||||
docs = [
|
docs = [
|
||||||
"cogapp",
|
"cogapp",
|
||||||
|
@ -62,9 +72,15 @@ docs = [
|
||||||
"sphinx>=7,<8",
|
"sphinx>=7,<8",
|
||||||
"sphinxcontrib-mermaid>=0.9,<1",
|
"sphinxcontrib-mermaid>=0.9,<1",
|
||||||
]
|
]
|
||||||
|
gunicorn = [
|
||||||
|
"gunicorn>=21.2,<22",
|
||||||
|
]
|
||||||
|
ldap = [
|
||||||
|
"python-ldap>=3.4.4,<4",
|
||||||
|
]
|
||||||
|
|
||||||
[project.urls]
|
[project.urls]
|
||||||
homepage = "https://framasoft.frama.io/framaspace/argos/"
|
homepage = "https://argos-monitoring.framasoft.org/"
|
||||||
repository = "https://framagit.org/framasoft/framaspace/argos"
|
repository = "https://framagit.org/framasoft/framaspace/argos"
|
||||||
"Funding" = "https://framasoft.org/en/#support"
|
"Funding" = "https://framasoft.org/en/#support"
|
||||||
"Tracker" = "https://framagit.org/framasoft/framaspace/argos/-/issues"
|
"Tracker" = "https://framagit.org/framasoft/framaspace/argos/-/issues"
|
||||||
|
@ -94,3 +110,10 @@ testpaths = [
|
||||||
"argos"
|
"argos"
|
||||||
]
|
]
|
||||||
pythonpath = "."
|
pythonpath = "."
|
||||||
|
filterwarnings = [
|
||||||
|
"ignore:'crypt' is deprecated and slated for removal in Python 3.13:DeprecationWarning",
|
||||||
|
"ignore:The 'app' shortcut is now deprecated:DeprecationWarning",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.mypy]
|
||||||
|
ignore_missing_imports = "True"
|
||||||
|
|
|
@ -1,4 +1,21 @@
|
||||||
|
---
|
||||||
general:
|
general:
|
||||||
|
# Except for frequency and recheck_delay settings, changes in general
|
||||||
|
# section of the configuration will need a restart of argos server.
|
||||||
|
db:
|
||||||
|
# The database URL, as defined in SQLAlchemy docs:
|
||||||
|
# https://docs.sqlalchemy.org/en/20/core/engines.html#database-urls
|
||||||
|
url: "sqlite:////tmp/test-argos.db"
|
||||||
|
# Can be "production", "dev", "test".
|
||||||
|
# If not present, default value is "production"
|
||||||
|
env: test
|
||||||
|
# To get a good string for cookie_secret, run:
|
||||||
|
# openssl rand -hex 32
|
||||||
|
cookie_secret: "foo-bar-baz"
|
||||||
|
|
||||||
|
# Default delay for checks.
|
||||||
|
# Can be superseded in domain configuration.
|
||||||
|
# For ex., to run checks every 5 minutes:
|
||||||
frequency: "1m"
|
frequency: "1m"
|
||||||
alerts:
|
alerts:
|
||||||
ok:
|
ok:
|
||||||
|
@ -9,12 +26,37 @@ general:
|
||||||
- local
|
- local
|
||||||
unknown:
|
unknown:
|
||||||
- local
|
- local
|
||||||
|
no_agent:
|
||||||
|
- local
|
||||||
service:
|
service:
|
||||||
secrets:
|
secrets:
|
||||||
|
# Secrets can be generated using `argos server generate-token`.
|
||||||
|
# You need at least one. Write them as a list, like:
|
||||||
|
# - secret_token
|
||||||
- "O4kt8Max9/k0EmHaEJ0CGGYbBNFmK8kOZNIoUk3Kjwc"
|
- "O4kt8Max9/k0EmHaEJ0CGGYbBNFmK8kOZNIoUk3Kjwc"
|
||||||
- "x1T1VZR51pxrv5pQUyzooMG4pMUvHNMhA5y/3cUsYVs="
|
- "x1T1VZR51pxrv5pQUyzooMG4pMUvHNMhA5y/3cUsYVs="
|
||||||
ssl:
|
ssl:
|
||||||
thresholds:
|
thresholds:
|
||||||
- "1d": critical
|
- "1d": critical
|
||||||
"5d": warning
|
- "5d": warning
|
||||||
|
|
||||||
|
# Argos will execute some tasks in the background for you
|
||||||
|
# every 2 minutes and needs some configuration for that
|
||||||
|
recurring_tasks:
|
||||||
|
# Maximum age of results
|
||||||
|
# Use m for minutes, h for hours, d for days
|
||||||
|
# w for weeks, M for months, y for years
|
||||||
|
# See https://github.com/timwedde/durations_nlp#scales-reference for details
|
||||||
|
max_results_age: "1d"
|
||||||
|
# Max number of seconds a task can be locked
|
||||||
|
# Minimum value is 61, default is 100
|
||||||
|
max_lock_seconds: 100
|
||||||
|
# Max number of seconds without seeing an agent
|
||||||
|
# before sending an alert
|
||||||
|
# Minimum value is 61, default is 300
|
||||||
|
time_without_agent: 300
|
||||||
|
|
||||||
|
# It's also possible to define the checks in another file
|
||||||
|
# with the include syntax:
|
||||||
|
#
|
||||||
websites: !include websites.yaml
|
websites: !include websites.yaml
|
||||||
|
|
|
@ -6,11 +6,11 @@ from fastapi import FastAPI
|
||||||
from fastapi.testclient import TestClient
|
from fastapi.testclient import TestClient
|
||||||
from sqlalchemy.orm import Session
|
from sqlalchemy.orm import Session
|
||||||
|
|
||||||
os.environ["ARGOS_APP_ENV"] = "test"
|
os.environ["ARGOS_YAML_FILE"] = "tests/config.yaml"
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def db() -> Session:
|
def db() -> Session: # type: ignore[misc]
|
||||||
from argos.server import models
|
from argos.server import models
|
||||||
|
|
||||||
app = _create_app()
|
app = _create_app()
|
||||||
|
@ -20,7 +20,7 @@ def db() -> Session:
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def app() -> FastAPI:
|
def app() -> FastAPI: # type: ignore[misc]
|
||||||
from argos.server import models
|
from argos.server import models
|
||||||
|
|
||||||
app = _create_app()
|
app = _create_app()
|
||||||
|
@ -45,10 +45,6 @@ def _create_app() -> FastAPI:
|
||||||
)
|
)
|
||||||
|
|
||||||
app = get_application()
|
app = get_application()
|
||||||
# Hardcode the database url and the yaml file for testing purpose
|
|
||||||
# Otherwise, the app will try to read the .env file or the environment variables
|
|
||||||
app.state.settings.database_url = "sqlite:////tmp/test-argos.db"
|
|
||||||
app.state.settings.yaml_file = "tests/config.yaml"
|
|
||||||
|
|
||||||
setup_database(app)
|
setup_database(app)
|
||||||
asyncio.run(connect_to_db(app))
|
asyncio.run(connect_to_db(app))
|
||||||
|
|
|
@ -21,7 +21,7 @@ def test_tasks_retrieval_and_results(authorized_client, app):
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
|
|
||||||
tasks = response.json()
|
tasks = response.json()
|
||||||
assert len(tasks) == 2
|
assert len(tasks) == 4
|
||||||
|
|
||||||
results = []
|
results = []
|
||||||
for task in tasks:
|
for task in tasks:
|
||||||
|
@ -33,7 +33,7 @@ def test_tasks_retrieval_and_results(authorized_client, app):
|
||||||
response = client.post("/api/results", json=data)
|
response = client.post("/api/results", json=data)
|
||||||
|
|
||||||
assert response.status_code == 201
|
assert response.status_code == 201
|
||||||
assert app.state.db.query(models.Result).count() == 2
|
assert app.state.db.query(models.Result).count() == 4
|
||||||
|
|
||||||
# The list of tasks should be empty now
|
# The list of tasks should be empty now
|
||||||
response = client.get("/api/tasks")
|
response = client.get("/api/tasks")
|
||||||
|
@ -60,6 +60,8 @@ def ssl_task(db):
|
||||||
task = models.Task(
|
task = models.Task(
|
||||||
url="https://exemple.com/",
|
url="https://exemple.com/",
|
||||||
domain="https://exemple.com/",
|
domain="https://exemple.com/",
|
||||||
|
ip_version="6",
|
||||||
|
method="GET",
|
||||||
check="ssl-certificate-expiration",
|
check="ssl-certificate-expiration",
|
||||||
expected="on-check",
|
expected="on-check",
|
||||||
frequency=1,
|
frequency=1,
|
||||||
|
|
|
@ -35,7 +35,13 @@ def ssl_task(now):
|
||||||
id=1,
|
id=1,
|
||||||
url="https://example.org",
|
url="https://example.org",
|
||||||
domain="https://example.org",
|
domain="https://example.org",
|
||||||
|
ip_version="6",
|
||||||
|
method="GET",
|
||||||
|
request_data=None,
|
||||||
|
task_group="GET-6-https://example.org",
|
||||||
check="ssl-certificate-expiration",
|
check="ssl-certificate-expiration",
|
||||||
|
retry_before_notification=0,
|
||||||
|
contiguous_failures=0,
|
||||||
expected="on-check",
|
expected="on-check",
|
||||||
selected_at=now,
|
selected_at=now,
|
||||||
selected_by="pytest",
|
selected_by="pytest",
|
||||||
|
@ -51,6 +57,9 @@ async def test_ssl_check_accepts_statuts(
|
||||||
return_value=httpx.Response(http_status, extensions=httpx_extensions_ssl),
|
return_value=httpx.Response(http_status, extensions=httpx_extensions_ssl),
|
||||||
)
|
)
|
||||||
async with httpx.AsyncClient() as client:
|
async with httpx.AsyncClient() as client:
|
||||||
check = SSLCertificateExpiration(client, ssl_task)
|
check = SSLCertificateExpiration(ssl_task)
|
||||||
check_response = await check.run()
|
response = await client.request(
|
||||||
|
method=ssl_task.method, url=ssl_task.url, timeout=60
|
||||||
|
)
|
||||||
|
check_response = await check.run(response)
|
||||||
assert check_response.status == "on-check"
|
assert check_response.status == "on-check"
|
||||||
|
|
150
tests/test_cli.py
Normal file
150
tests/test_cli.py
Normal file
|
@ -0,0 +1,150 @@
|
||||||
|
import os
|
||||||
|
|
||||||
|
from click.testing import CliRunner
|
||||||
|
from argos.commands import (
|
||||||
|
add,
|
||||||
|
verify_password,
|
||||||
|
change_password,
|
||||||
|
show,
|
||||||
|
disable,
|
||||||
|
enable,
|
||||||
|
delete,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
os.environ["ARGOS_APP_ENV"] = "test"
|
||||||
|
os.environ["ARGOS_YAML_FILE"] = "tests/config.yaml"
|
||||||
|
|
||||||
|
|
||||||
|
def test_add_user():
|
||||||
|
runner = CliRunner()
|
||||||
|
result = runner.invoke(add, ["--name", "foo"], input="bar\nbar\n")
|
||||||
|
assert result.exit_code == 0
|
||||||
|
assert result.output == "Password: \nRepeat for confirmation: \nUser foo added.\n"
|
||||||
|
result = runner.invoke(add, ["--name", "foo"], input="bar\nbar\n")
|
||||||
|
assert result.exit_code == 1
|
||||||
|
assert (
|
||||||
|
result.output
|
||||||
|
== "Password: \nRepeat for confirmation: \nUser foo already exists.\n"
|
||||||
|
)
|
||||||
|
result = runner.invoke(add, ["--name", "baz", "--password", "qux"])
|
||||||
|
assert result.exit_code == 0
|
||||||
|
assert result.output == "User baz added.\n"
|
||||||
|
|
||||||
|
|
||||||
|
def test_user_password():
|
||||||
|
runner = CliRunner()
|
||||||
|
result = runner.invoke(verify_password, ["--name", "foo"], input="bar\n")
|
||||||
|
assert result.exit_code == 0
|
||||||
|
assert result.output == "Password: \nThe provided password is correct.\n"
|
||||||
|
result = runner.invoke(verify_password, ["--name", "foo", "--password", "bar"])
|
||||||
|
assert result.exit_code == 0
|
||||||
|
assert result.output == "The provided password is correct.\n"
|
||||||
|
result = runner.invoke(verify_password, ["--name", "quux", "--password", "corge"])
|
||||||
|
assert result.exit_code == 1
|
||||||
|
assert result.output == "User quux does not exist.\n"
|
||||||
|
result = runner.invoke(verify_password, ["--name", "foo", "--password", "grault"])
|
||||||
|
assert result.exit_code == 2
|
||||||
|
assert result.output == "Wrong password!\n"
|
||||||
|
|
||||||
|
|
||||||
|
def test_change_password():
|
||||||
|
runner = CliRunner()
|
||||||
|
result = runner.invoke(verify_password, ["--name", "foo", "--password", "grault"])
|
||||||
|
assert result.exit_code == 2
|
||||||
|
assert result.output == "Wrong password!\n"
|
||||||
|
result = runner.invoke(change_password, ["--name", "foo"], input="grault\ngrault\n")
|
||||||
|
assert result.exit_code == 0
|
||||||
|
assert (
|
||||||
|
result.output
|
||||||
|
== "Password: \nRepeat for confirmation: \nPassword of user foo changed.\n"
|
||||||
|
)
|
||||||
|
result = runner.invoke(verify_password, ["--name", "foo", "--password", "grault"])
|
||||||
|
assert result.exit_code == 0
|
||||||
|
assert result.output == "The provided password is correct.\n"
|
||||||
|
result = runner.invoke(change_password, ["--name", "foo", "--password", "bar"])
|
||||||
|
assert result.exit_code == 0
|
||||||
|
assert result.output == "Password of user foo changed.\n"
|
||||||
|
result = runner.invoke(verify_password, ["--name", "foo", "--password", "bar"])
|
||||||
|
assert result.exit_code == 0
|
||||||
|
assert result.output == "The provided password is correct.\n"
|
||||||
|
result = runner.invoke(verify_password, ["--name", "quux", "--password", "bar"])
|
||||||
|
assert result.exit_code == 1
|
||||||
|
assert result.output == "User quux does not exist.\n"
|
||||||
|
|
||||||
|
|
||||||
|
def test_show():
|
||||||
|
runner = CliRunner()
|
||||||
|
result = runner.invoke(show)
|
||||||
|
assert result.exit_code == 0
|
||||||
|
assert (
|
||||||
|
result.output
|
||||||
|
== "✅ means that the user is enabled.\n❌ means that the user is disabled.\n"
|
||||||
|
"✅ baz, last login: None\n✅ foo, last login: None\n"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_disable():
|
||||||
|
runner = CliRunner()
|
||||||
|
result = runner.invoke(disable, ["--name", "quux"])
|
||||||
|
assert result.exit_code == 1
|
||||||
|
assert result.output == "User quux does not exist.\n"
|
||||||
|
result = runner.invoke(disable, ["--name", "foo"])
|
||||||
|
assert result.exit_code == 0
|
||||||
|
assert result.output == "User foo disabled.\n"
|
||||||
|
result = runner.invoke(disable, ["--name", "foo"])
|
||||||
|
assert result.exit_code == 2
|
||||||
|
assert result.output == "User foo is already disabled.\n"
|
||||||
|
result = runner.invoke(show)
|
||||||
|
assert result.exit_code == 0
|
||||||
|
assert (
|
||||||
|
result.output
|
||||||
|
== "✅ means that the user is enabled.\n❌ means that the user is disabled.\n"
|
||||||
|
"✅ baz, last login: None\n❌ foo, last login: None\n"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_enable():
|
||||||
|
runner = CliRunner()
|
||||||
|
result = runner.invoke(enable, ["--name", "quux"])
|
||||||
|
assert result.exit_code == 1
|
||||||
|
assert result.output == "User quux does not exist.\n"
|
||||||
|
result = runner.invoke(enable, ["--name", "foo"])
|
||||||
|
assert result.exit_code == 0
|
||||||
|
assert result.output == "User foo enabled.\n"
|
||||||
|
result = runner.invoke(enable, ["--name", "foo"])
|
||||||
|
assert result.exit_code == 2
|
||||||
|
assert result.output == "User foo is already enabled.\n"
|
||||||
|
result = runner.invoke(show)
|
||||||
|
assert result.exit_code == 0
|
||||||
|
assert (
|
||||||
|
result.output
|
||||||
|
== "✅ means that the user is enabled.\n❌ means that the user is disabled.\n"
|
||||||
|
"✅ baz, last login: None\n✅ foo, last login: None\n"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_delete():
|
||||||
|
runner = CliRunner()
|
||||||
|
result = runner.invoke(delete, ["--name", "quux"])
|
||||||
|
assert result.exit_code == 1
|
||||||
|
assert result.output == "User quux does not exist.\n"
|
||||||
|
result = runner.invoke(delete, ["--name", "foo"])
|
||||||
|
assert result.exit_code == 0
|
||||||
|
assert result.output == "User foo deleted.\n"
|
||||||
|
result = runner.invoke(delete, ["--name", "foo"])
|
||||||
|
assert result.exit_code == 1
|
||||||
|
assert result.output == "User foo does not exist.\n"
|
||||||
|
result = runner.invoke(show)
|
||||||
|
assert result.exit_code == 0
|
||||||
|
assert (
|
||||||
|
result.output
|
||||||
|
== "✅ means that the user is enabled.\n❌ means that the user is disabled.\n"
|
||||||
|
"✅ baz, last login: None\n"
|
||||||
|
)
|
||||||
|
result = runner.invoke(delete, ["--name", "baz"])
|
||||||
|
assert result.exit_code == 0
|
||||||
|
assert result.output == "User baz deleted.\n"
|
||||||
|
result = runner.invoke(show)
|
||||||
|
assert result.exit_code == 1
|
||||||
|
assert result.output == "There is no users in database.\n"
|
|
@ -4,18 +4,18 @@ import pytest
|
||||||
|
|
||||||
from argos import schemas
|
from argos import schemas
|
||||||
from argos.server import queries
|
from argos.server import queries
|
||||||
from argos.server.models import Result, Task
|
from argos.server.models import Result, Task, User
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_remove_old_results(db, ten_tasks):
|
async def test_remove_old_results(db, ten_tasks): # pylint: disable-msg=redefined-outer-name
|
||||||
for task in ten_tasks:
|
for _task in ten_tasks:
|
||||||
for i in range(5):
|
for iterator in range(5):
|
||||||
result = Result(
|
result = Result(
|
||||||
submitted_at=datetime.now(),
|
submitted_at=datetime.now() - timedelta(seconds=iterator * 2),
|
||||||
status="success",
|
status="success",
|
||||||
context={"foo": "bar"},
|
context={"foo": "bar"},
|
||||||
task=task,
|
task=_task,
|
||||||
agent_id="test",
|
agent_id="test",
|
||||||
severity="ok",
|
severity="ok",
|
||||||
)
|
)
|
||||||
|
@ -24,12 +24,12 @@ async def test_remove_old_results(db, ten_tasks):
|
||||||
|
|
||||||
# So we have 5 results per tasks
|
# So we have 5 results per tasks
|
||||||
assert db.query(Result).count() == 50
|
assert db.query(Result).count() == 50
|
||||||
# Keep only 2
|
# Keep only those newer than 1 second ago
|
||||||
deleted = await queries.remove_old_results(db, 2)
|
deleted = await queries.remove_old_results(db, 6)
|
||||||
assert deleted == 30
|
assert deleted == 20
|
||||||
assert db.query(Result).count() == 20
|
assert db.query(Result).count() == 30
|
||||||
for task in ten_tasks:
|
for _task in ten_tasks:
|
||||||
assert db.query(Result).filter(Result.task == task).count() == 2
|
assert db.query(Result).filter(Result.task == _task).count() == 3
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
|
@ -40,7 +40,7 @@ async def test_remove_old_results_with_empty_db(db):
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_release_old_locks(db, ten_locked_tasks, ten_tasks):
|
async def test_release_old_locks(db, ten_locked_tasks, ten_tasks): # pylint: disable-msg=redefined-outer-name
|
||||||
assert db.query(Task).count() == 20
|
assert db.query(Task).count() == 20
|
||||||
released = await queries.release_old_locks(db, 10)
|
released = await queries.release_old_locks(db, 10)
|
||||||
assert released == 10
|
assert released == 10
|
||||||
|
@ -54,9 +54,9 @@ async def test_release_old_locks_with_empty_db(db):
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_update_from_config_with_duplicate_tasks(db, empty_config):
|
async def test_update_from_config_with_duplicate_tasks(db, empty_config): # pylint: disable-msg=redefined-outer-name
|
||||||
# We pass the same path twice
|
# We pass the same path twice
|
||||||
fake_path = dict(path="/", checks=[{"body-contains": "foo"}])
|
fake_path = {"path": "/", "checks": [{"body-contains": "foo"}]}
|
||||||
website = schemas.config.Website(
|
website = schemas.config.Website(
|
||||||
domain="https://example.org",
|
domain="https://example.org",
|
||||||
paths=[
|
paths=[
|
||||||
|
@ -70,7 +70,7 @@ async def test_update_from_config_with_duplicate_tasks(db, empty_config):
|
||||||
await queries.update_from_config(db, empty_config)
|
await queries.update_from_config(db, empty_config)
|
||||||
|
|
||||||
# Only one path has been saved in the database
|
# Only one path has been saved in the database
|
||||||
assert db.query(Task).count() == 1
|
assert db.query(Task).count() == 2
|
||||||
|
|
||||||
# Calling again with the same data works, and will not result in more tasks being
|
# Calling again with the same data works, and will not result in more tasks being
|
||||||
# created.
|
# created.
|
||||||
|
@ -79,12 +79,15 @@ async def test_update_from_config_with_duplicate_tasks(db, empty_config):
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_update_from_config_db_can_remove_duplicates_and_old_tasks(
|
async def test_update_from_config_db_can_remove_duplicates_and_old_tasks(
|
||||||
db, empty_config, task
|
db,
|
||||||
|
empty_config,
|
||||||
|
task, # pylint: disable-msg=redefined-outer-name
|
||||||
):
|
):
|
||||||
# Add a duplicate in the db
|
# Add a duplicate in the db
|
||||||
same_task = Task(
|
same_task = Task(
|
||||||
url=task.url,
|
url=task.url,
|
||||||
domain=task.domain,
|
domain=task.domain,
|
||||||
|
ip_version="6",
|
||||||
check=task.check,
|
check=task.check,
|
||||||
expected=task.expected,
|
expected=task.expected,
|
||||||
frequency=task.frequency,
|
frequency=task.frequency,
|
||||||
|
@ -96,10 +99,25 @@ async def test_update_from_config_db_can_remove_duplicates_and_old_tasks(
|
||||||
website = schemas.config.Website(
|
website = schemas.config.Website(
|
||||||
domain=task.domain,
|
domain=task.domain,
|
||||||
paths=[
|
paths=[
|
||||||
dict(
|
{
|
||||||
path="https://another-example.com", checks=[{task.check: task.expected}]
|
"path": "https://another-example.com",
|
||||||
),
|
"checks": [{task.check: task.expected}],
|
||||||
dict(path=task.url, checks=[{task.check: task.expected}]),
|
},
|
||||||
|
{"path": task.url, "checks": [{task.check: task.expected}]},
|
||||||
|
],
|
||||||
|
)
|
||||||
|
empty_config.websites = [website]
|
||||||
|
|
||||||
|
await queries.update_from_config(db, empty_config)
|
||||||
|
assert db.query(Task).count() == 4
|
||||||
|
|
||||||
|
website = schemas.config.Website(
|
||||||
|
domain=task.domain,
|
||||||
|
paths=[
|
||||||
|
{
|
||||||
|
"path": "https://another-example.com",
|
||||||
|
"checks": [{task.check: task.expected}],
|
||||||
|
}
|
||||||
],
|
],
|
||||||
)
|
)
|
||||||
empty_config.websites = [website]
|
empty_config.websites = [website]
|
||||||
|
@ -107,39 +125,28 @@ async def test_update_from_config_db_can_remove_duplicates_and_old_tasks(
|
||||||
await queries.update_from_config(db, empty_config)
|
await queries.update_from_config(db, empty_config)
|
||||||
assert db.query(Task).count() == 2
|
assert db.query(Task).count() == 2
|
||||||
|
|
||||||
website = schemas.config.Website(
|
|
||||||
domain=task.domain,
|
|
||||||
paths=[
|
|
||||||
dict(
|
|
||||||
path="https://another-example.com", checks=[{task.check: task.expected}]
|
|
||||||
),
|
|
||||||
],
|
|
||||||
)
|
|
||||||
empty_config.websites = [website]
|
|
||||||
|
|
||||||
await queries.update_from_config(db, empty_config)
|
|
||||||
assert db.query(Task).count() == 1
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_update_from_config_db_updates_existing_tasks(db, empty_config, task):
|
async def test_update_from_config_db_updates_existing_tasks(db, empty_config, task): # pylint: disable-msg=redefined-outer-name
|
||||||
assert db.query(Task).count() == 1
|
assert db.query(Task).count() == 1
|
||||||
|
|
||||||
website = schemas.config.Website(
|
website = schemas.config.Website(
|
||||||
domain=task.domain,
|
domain=task.domain,
|
||||||
paths=[
|
paths=[{"path": task.url, "checks": [{task.check: task.expected}]}],
|
||||||
dict(path=task.url, checks=[{task.check: task.expected}]),
|
|
||||||
],
|
|
||||||
)
|
)
|
||||||
empty_config.websites = [website]
|
empty_config.websites = [website]
|
||||||
|
|
||||||
await queries.update_from_config(db, empty_config)
|
await queries.update_from_config(db, empty_config)
|
||||||
assert db.query(Task).count() == 1
|
assert db.query(Task).count() == 2
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_reschedule_all(
|
async def test_reschedule_all(
|
||||||
db, ten_tasks, ten_warning_tasks, ten_critical_tasks, ten_ok_tasks
|
db,
|
||||||
|
ten_tasks,
|
||||||
|
ten_warning_tasks,
|
||||||
|
ten_critical_tasks,
|
||||||
|
ten_ok_tasks, # pylint: disable-msg=redefined-outer-name
|
||||||
):
|
):
|
||||||
assert db.query(Task).count() == 40
|
assert db.query(Task).count() == 40
|
||||||
assert db.query(Task).filter(Task.severity == "unknown").count() == 10
|
assert db.query(Task).filter(Task.severity == "unknown").count() == 10
|
||||||
|
@ -154,30 +161,81 @@ async def test_reschedule_all(
|
||||||
assert db.query(Task).filter(Task.next_run <= one_hour_ago).count() == 30
|
assert db.query(Task).filter(Task.next_run <= one_hour_ago).count() == 30
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_add_user(db):
|
||||||
|
users = await queries.list_users(db)
|
||||||
|
assert users.count() == 0
|
||||||
|
|
||||||
|
_user = await queries.add_user(db, "john", "doe")
|
||||||
|
assert _user.username == "john"
|
||||||
|
assert _user.password == "doe"
|
||||||
|
assert _user.disabled == False
|
||||||
|
assert _user.created_at is not None
|
||||||
|
assert _user.updated_at is None
|
||||||
|
assert _user.last_login_at is None
|
||||||
|
|
||||||
|
_user = await queries.get_user(db, "morgan")
|
||||||
|
assert _user is None
|
||||||
|
|
||||||
|
_user = await queries.get_user(db, "john")
|
||||||
|
assert _user.username == "john"
|
||||||
|
assert _user.password == "doe"
|
||||||
|
assert _user.disabled == False
|
||||||
|
assert _user.created_at is not None
|
||||||
|
assert _user.updated_at is None
|
||||||
|
assert _user.last_login_at is None
|
||||||
|
|
||||||
|
users = await queries.list_users(db)
|
||||||
|
assert users.count() == 1
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_remove_user(db, user): # pylint: disable-msg=redefined-outer-name
|
||||||
|
users = await queries.list_users(db)
|
||||||
|
assert users.count() == 1
|
||||||
|
|
||||||
|
assert user.username == "jane"
|
||||||
|
assert user.password == "doe"
|
||||||
|
assert user.disabled == False
|
||||||
|
assert user.created_at is not None
|
||||||
|
assert user.updated_at is None
|
||||||
|
assert user.last_login_at is None
|
||||||
|
|
||||||
|
db.delete(user)
|
||||||
|
db.commit()
|
||||||
|
|
||||||
|
users = await queries.list_users(db)
|
||||||
|
assert users.count() == 0
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def task(db):
|
def task(db):
|
||||||
task = Task(
|
_task = Task(
|
||||||
url="https://www.example.com",
|
url="https://www.example.com",
|
||||||
domain="https://www.example.com",
|
domain="https://www.example.com",
|
||||||
|
ip_version="6",
|
||||||
check="body-contains",
|
check="body-contains",
|
||||||
expected="foo",
|
expected="foo",
|
||||||
frequency=1,
|
frequency=1,
|
||||||
)
|
)
|
||||||
db.add(task)
|
db.add(_task)
|
||||||
db.commit()
|
db.commit()
|
||||||
return task
|
return _task
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def empty_config():
|
def empty_config():
|
||||||
return schemas.config.Config(
|
return schemas.config.Config(
|
||||||
general=schemas.config.General(
|
general=schemas.config.General(
|
||||||
|
db=schemas.config.DbSettings(url="sqlite:////tmp/test-argos.db"),
|
||||||
|
cookie_secret="foo-bar-baz",
|
||||||
frequency="1m",
|
frequency="1m",
|
||||||
alerts=schemas.config.Alert(
|
alerts=schemas.config.Alert(
|
||||||
ok=["", ""],
|
ok=["", ""],
|
||||||
warning=["", ""],
|
warning=["", ""],
|
||||||
critical=["", ""],
|
critical=["", ""],
|
||||||
unknown=["", ""],
|
unknown=["", ""],
|
||||||
|
no_agent=["", ""],
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
service=schemas.config.Service(
|
service=schemas.config.Service(
|
||||||
|
@ -186,14 +244,19 @@ def empty_config():
|
||||||
]
|
]
|
||||||
),
|
),
|
||||||
ssl=schemas.config.SSL(thresholds=[]),
|
ssl=schemas.config.SSL(thresholds=[]),
|
||||||
|
recurring_tasks=schemas.config.RecurringTasks(
|
||||||
|
max_results_age="6s",
|
||||||
|
max_lock_seconds=120,
|
||||||
|
time_without_agent=300,
|
||||||
|
),
|
||||||
websites=[],
|
websites=[],
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def ten_results(db, task):
|
def ten_results(db, task): # pylint: disable-msg=redefined-outer-name
|
||||||
results = []
|
results = []
|
||||||
for i in range(10):
|
for _ in range(10):
|
||||||
result = Result(
|
result = Result(
|
||||||
submitted_at=datetime.now(),
|
submitted_at=datetime.now(),
|
||||||
status="success",
|
status="success",
|
||||||
|
@ -212,18 +275,19 @@ def ten_results(db, task):
|
||||||
def ten_locked_tasks(db):
|
def ten_locked_tasks(db):
|
||||||
a_minute_ago = datetime.now() - timedelta(minutes=1)
|
a_minute_ago = datetime.now() - timedelta(minutes=1)
|
||||||
tasks = []
|
tasks = []
|
||||||
for i in range(10):
|
for _ in range(10):
|
||||||
task = Task(
|
_task = Task(
|
||||||
url="https://www.example.com",
|
url="https://www.example.com",
|
||||||
domain="example.com",
|
domain="example.com",
|
||||||
|
ip_version="6",
|
||||||
check="body-contains",
|
check="body-contains",
|
||||||
expected="foo",
|
expected="foo",
|
||||||
frequency=1,
|
frequency=1,
|
||||||
selected_by="test",
|
selected_by="test",
|
||||||
selected_at=a_minute_ago,
|
selected_at=a_minute_ago,
|
||||||
)
|
)
|
||||||
db.add(task)
|
db.add(_task)
|
||||||
tasks.append(task)
|
tasks.append(_task)
|
||||||
db.commit()
|
db.commit()
|
||||||
return tasks
|
return tasks
|
||||||
|
|
||||||
|
@ -232,18 +296,19 @@ def ten_locked_tasks(db):
|
||||||
def ten_tasks(db):
|
def ten_tasks(db):
|
||||||
now = datetime.now()
|
now = datetime.now()
|
||||||
tasks = []
|
tasks = []
|
||||||
for i in range(10):
|
for _ in range(10):
|
||||||
task = Task(
|
_task = Task(
|
||||||
url="https://www.example.com",
|
url="https://www.example.com",
|
||||||
domain="example.com",
|
domain="example.com",
|
||||||
|
ip_version="6",
|
||||||
check="body-contains",
|
check="body-contains",
|
||||||
expected="foo",
|
expected="foo",
|
||||||
frequency=1,
|
frequency=1,
|
||||||
selected_by="test",
|
selected_by="test",
|
||||||
selected_at=now,
|
selected_at=now,
|
||||||
)
|
)
|
||||||
db.add(task)
|
db.add(_task)
|
||||||
tasks.append(task)
|
tasks.append(_task)
|
||||||
db.commit()
|
db.commit()
|
||||||
return tasks
|
return tasks
|
||||||
|
|
||||||
|
@ -252,18 +317,19 @@ def ten_tasks(db):
|
||||||
def ten_warning_tasks(db):
|
def ten_warning_tasks(db):
|
||||||
now = datetime.now()
|
now = datetime.now()
|
||||||
tasks = []
|
tasks = []
|
||||||
for i in range(10):
|
for _ in range(10):
|
||||||
task = Task(
|
_task = Task(
|
||||||
url="https://www.example.com",
|
url="https://www.example.com",
|
||||||
domain="example.com",
|
domain="example.com",
|
||||||
|
ip_version="6",
|
||||||
check="body-contains",
|
check="body-contains",
|
||||||
expected="foo",
|
expected="foo",
|
||||||
frequency=1,
|
frequency=1,
|
||||||
next_run=now,
|
next_run=now,
|
||||||
severity="warning",
|
severity="warning",
|
||||||
)
|
)
|
||||||
db.add(task)
|
db.add(_task)
|
||||||
tasks.append(task)
|
tasks.append(_task)
|
||||||
db.commit()
|
db.commit()
|
||||||
return tasks
|
return tasks
|
||||||
|
|
||||||
|
@ -272,18 +338,19 @@ def ten_warning_tasks(db):
|
||||||
def ten_critical_tasks(db):
|
def ten_critical_tasks(db):
|
||||||
now = datetime.now()
|
now = datetime.now()
|
||||||
tasks = []
|
tasks = []
|
||||||
for i in range(10):
|
for _ in range(10):
|
||||||
task = Task(
|
_task = Task(
|
||||||
url="https://www.example.com",
|
url="https://www.example.com",
|
||||||
domain="example.com",
|
domain="example.com",
|
||||||
|
ip_version="6",
|
||||||
check="body-contains",
|
check="body-contains",
|
||||||
expected="foo",
|
expected="foo",
|
||||||
frequency=1,
|
frequency=1,
|
||||||
next_run=now,
|
next_run=now,
|
||||||
severity="critical",
|
severity="critical",
|
||||||
)
|
)
|
||||||
db.add(task)
|
db.add(_task)
|
||||||
tasks.append(task)
|
tasks.append(_task)
|
||||||
db.commit()
|
db.commit()
|
||||||
return tasks
|
return tasks
|
||||||
|
|
||||||
|
@ -292,17 +359,30 @@ def ten_critical_tasks(db):
|
||||||
def ten_ok_tasks(db):
|
def ten_ok_tasks(db):
|
||||||
now = datetime.now()
|
now = datetime.now()
|
||||||
tasks = []
|
tasks = []
|
||||||
for i in range(10):
|
for _ in range(10):
|
||||||
task = Task(
|
_task = Task(
|
||||||
url="https://www.example.com",
|
url="https://www.example.com",
|
||||||
domain="example.com",
|
domain="example.com",
|
||||||
|
ip_version="6",
|
||||||
check="body-contains",
|
check="body-contains",
|
||||||
expected="foo",
|
expected="foo",
|
||||||
frequency=1,
|
frequency=1,
|
||||||
next_run=now,
|
next_run=now,
|
||||||
severity="ok",
|
severity="ok",
|
||||||
)
|
)
|
||||||
db.add(task)
|
db.add(_task)
|
||||||
tasks.append(task)
|
tasks.append(_task)
|
||||||
db.commit()
|
db.commit()
|
||||||
return tasks
|
return tasks
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def user(db):
|
||||||
|
_user = User(
|
||||||
|
username="jane",
|
||||||
|
password="doe",
|
||||||
|
disabled=False,
|
||||||
|
)
|
||||||
|
db.add(_user)
|
||||||
|
db.commit()
|
||||||
|
return _user
|
||||||
|
|
|
@ -1,51 +0,0 @@
|
||||||
import pytest
|
|
||||||
|
|
||||||
from argos.schemas.utils import string_to_duration
|
|
||||||
|
|
||||||
|
|
||||||
def test_string_to_duration_days():
|
|
||||||
assert string_to_duration("1d", target="days") == 1
|
|
||||||
assert string_to_duration("1w", target="days") == 7
|
|
||||||
assert string_to_duration("3w", target="days") == 21
|
|
||||||
assert string_to_duration("3mo", target="days") == 90
|
|
||||||
assert string_to_duration("1y", target="days") == 365
|
|
||||||
|
|
||||||
with pytest.raises(ValueError):
|
|
||||||
string_to_duration("3h", target="days")
|
|
||||||
|
|
||||||
with pytest.raises(ValueError):
|
|
||||||
string_to_duration("1", target="days")
|
|
||||||
|
|
||||||
|
|
||||||
def test_string_to_duration_hours():
|
|
||||||
assert string_to_duration("1h", target="hours") == 1
|
|
||||||
assert string_to_duration("1d", target="hours") == 24
|
|
||||||
assert string_to_duration("1w", target="hours") == 7 * 24
|
|
||||||
assert string_to_duration("3w", target="hours") == 21 * 24
|
|
||||||
assert string_to_duration("3mo", target="hours") == 3 * 30 * 24
|
|
||||||
|
|
||||||
with pytest.raises(ValueError):
|
|
||||||
string_to_duration("1", target="hours")
|
|
||||||
|
|
||||||
|
|
||||||
def test_string_to_duration_minutes():
|
|
||||||
assert string_to_duration("1m", target="minutes") == 1
|
|
||||||
assert string_to_duration("1h", target="minutes") == 60
|
|
||||||
assert string_to_duration("1d", target="minutes") == 60 * 24
|
|
||||||
assert string_to_duration("3mo", target="minutes") == 60 * 24 * 30 * 3
|
|
||||||
|
|
||||||
with pytest.raises(ValueError):
|
|
||||||
string_to_duration("1", target="minutes")
|
|
||||||
|
|
||||||
|
|
||||||
def test_conversion_to_greater_units_throws():
|
|
||||||
# hours and minutes cannot be converted to days
|
|
||||||
with pytest.raises(ValueError):
|
|
||||||
string_to_duration("1h", target="days")
|
|
||||||
|
|
||||||
with pytest.raises(ValueError):
|
|
||||||
string_to_duration("1m", target="days")
|
|
||||||
|
|
||||||
# minutes cannot be converted to hours
|
|
||||||
with pytest.raises(ValueError):
|
|
||||||
string_to_duration("1m", target="hours")
|
|
|
@ -1,6 +1,7 @@
|
||||||
|
---
|
||||||
- domain: "https://mypads.framapad.org"
|
- domain: "https://mypads.framapad.org"
|
||||||
paths:
|
paths:
|
||||||
- path: "/mypads/"
|
- path: "/mypads/"
|
||||||
checks:
|
checks:
|
||||||
- status-is: 200
|
- status-is: 200
|
||||||
- body-contains: '<div id= "mypads"></div>'
|
- body-contains: '<div id= "mypads"></div>'
|
||||||
|
|
Loading…
Reference in a new issue