Working SSL checks, refactoring of the codebase.

- Start implementing some tests using pytest
- Packaged using pyproject.toml
- Implemented SSL checks using httpx
- Checks can now run partially on the server, to access the configuration and determine the severity of the error if any
- Used black to format all the files
- Added a utility to convert strings like "3d" and "3w" to days
- The internal representation of SSL thresholds is now a list of tuples
- Models were lacking some relationships between Tasks and Results
This commit is contained in:
Alexis Métaireau 2023-10-09 19:33:58 +02:00
parent d3c4f1e87b
commit 42ec15c6f4
20 changed files with 658 additions and 173 deletions

3
.gitignore vendored Normal file
View file

@ -0,0 +1,3 @@
__pycache__
*.egg-info
.vscode

View file

@ -11,8 +11,10 @@ pyyaml = "*"
httpx = "*" httpx = "*"
click = "*" click = "*"
aiosqlite = "*" aiosqlite = "*"
sqlalchemy = {extras = ["asyncio"], version = "*"} sqlalchemy = {extras = ["asyncio"] }
pyopenssl = "*" pyopenssl = "*"
ipdb = "*"
argos = {extras = ["dev"], file = ".", editable = true}
[dev-packages] [dev-packages]

270
Pipfile.lock generated
View file

@ -1,7 +1,7 @@
{ {
"_meta": { "_meta": {
"hash": { "hash": {
"sha256": "e6eaf14f53ea7b88c8245712c5639fa870ba5c7418f3f12697422d510386e6fc" "sha256": "545ec239a057ec56cb3b4e5d7d6b8922a837d9ce2340e4b2def368c8064acf73"
}, },
"pipfile-spec": 6, "pipfile-spec": 6,
"requires": { "requires": {
@ -27,11 +27,11 @@
}, },
"annotated-types": { "annotated-types": {
"hashes": [ "hashes": [
"sha256:47cdc3490d9ac1506ce92c7aaa76c579dc3509ff11e098fc867e5130ab7be802", "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43",
"sha256:58da39888f92c276ad970249761ebea80ba544b77acddaa1a4d6cf78287d45fd" "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"
], ],
"markers": "python_version >= '3.7'", "markers": "python_version >= '3.8'",
"version": "==0.5.0" "version": "==0.6.0"
}, },
"anyio": { "anyio": {
"hashes": [ "hashes": [
@ -41,6 +41,65 @@
"markers": "python_version >= '3.7'", "markers": "python_version >= '3.7'",
"version": "==3.7.1" "version": "==3.7.1"
}, },
"appnope": {
"hashes": [
"sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24",
"sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"
],
"markers": "sys_platform == 'darwin'",
"version": "==0.1.3"
},
"argos": {
"editable": true,
"extras": [
"dev"
],
"file": "."
},
"asttokens": {
"hashes": [
"sha256:2e0171b991b2c959acc6c49318049236844a5da1d65ba2672c4880c1c894834e",
"sha256:cf8fc9e61a86461aa9fb161a14a0841a03c405fa829ac6b202670b3495d2ce69"
],
"version": "==2.4.0"
},
"backcall": {
"hashes": [
"sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e",
"sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"
],
"version": "==0.2.0"
},
"black": {
"hashes": [
"sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5",
"sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915",
"sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326",
"sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940",
"sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b",
"sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30",
"sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c",
"sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c",
"sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab",
"sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27",
"sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2",
"sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961",
"sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9",
"sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb",
"sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70",
"sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331",
"sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2",
"sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266",
"sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d",
"sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6",
"sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b",
"sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925",
"sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8",
"sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4",
"sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3"
],
"version": "==23.3.0"
},
"certifi": { "certifi": {
"hashes": [ "hashes": [
"sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082", "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082",
@ -145,6 +204,21 @@
"markers": "python_version >= '3.7'", "markers": "python_version >= '3.7'",
"version": "==41.0.4" "version": "==41.0.4"
}, },
"decorator": {
"hashes": [
"sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330",
"sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"
],
"markers": "python_version >= '3.11'",
"version": "==5.1.1"
},
"executing": {
"hashes": [
"sha256:06df6183df67389625f4e763921c6cf978944721abf3e714000200aab95b0657",
"sha256:0ff053696fdeef426cda5bd18eacd94f82c91f49823a2e9090124212ceea9b08"
],
"version": "==2.0.0"
},
"fastapi": { "fastapi": {
"hashes": [ "hashes": [
"sha256:3270de872f0fe9ec809d4bd3d4d890c6d5cc7b9611d721d6438f9dacc8c4ef2e", "sha256:3270de872f0fe9ec809d4bd3d4d890c6d5cc7b9611d721d6438f9dacc8c4ef2e",
@ -254,6 +328,139 @@
"markers": "python_version >= '3.5'", "markers": "python_version >= '3.5'",
"version": "==3.4" "version": "==3.4"
}, },
"iniconfig": {
"hashes": [
"sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3",
"sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"
],
"markers": "python_version >= '3.7'",
"version": "==2.0.0"
},
"ipdb": {
"hashes": [
"sha256:45529994741c4ab6d2388bfa5d7b725c2cf7fe9deffabdb8a6113aa5ed449ed4",
"sha256:e3ac6018ef05126d442af680aad863006ec19d02290561ac88b8b1c0b0cfc726"
],
"index": "pypi",
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==0.13.13"
},
"ipython": {
"hashes": [
"sha256:0852469d4d579d9cd613c220af7bf0c9cc251813e12be647cb9d463939db9b1e",
"sha256:ad52f58fca8f9f848e256c629eff888efc0528c12fe0f8ec14f33205f23ef938"
],
"markers": "python_version >= '3.11'",
"version": "==8.16.1"
},
"isort": {
"hashes": [
"sha256:6be1f76a507cb2ecf16c7cf14a37e41609ca082330be4e3436a18ef74add55db",
"sha256:ba1d72fb2595a01c7895a5128f9585a5cc4b6d395f1c8d514989b9a7eb2a8746"
],
"version": "==5.11.5"
},
"jedi": {
"hashes": [
"sha256:cf0496f3651bc65d7174ac1b7d043eff454892c708a87d1b683e57b569927ffd",
"sha256:e983c654fe5c02867aef4cdfce5a2fbb4a50adc0af145f70504238f18ef5e7e0"
],
"markers": "python_version >= '3.6'",
"version": "==0.19.1"
},
"matplotlib-inline": {
"hashes": [
"sha256:f1f41aab5328aa5aaea9b16d083b128102f8712542f819fe7e6a420ff581b311",
"sha256:f887e5f10ba98e8d2b150ddcf4702c1e5f8b3a20005eb0f74bfdbd360ee6f304"
],
"markers": "python_version >= '3.5'",
"version": "==0.1.6"
},
"mypy-extensions": {
"hashes": [
"sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d",
"sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"
],
"markers": "python_version >= '3.5'",
"version": "==1.0.0"
},
"packaging": {
"hashes": [
"sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5",
"sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"
],
"markers": "python_version >= '3.7'",
"version": "==23.2"
},
"parso": {
"hashes": [
"sha256:8c07be290bb59f03588915921e29e8a50002acaf2cdc5fa0e0114f91709fafa0",
"sha256:c001d4636cd3aecdaf33cbb40aebb59b094be2a74c556778ef5576c175e19e75"
],
"markers": "python_version >= '3.6'",
"version": "==0.8.3"
},
"pathspec": {
"hashes": [
"sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20",
"sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"
],
"markers": "python_version >= '3.7'",
"version": "==0.11.2"
},
"pexpect": {
"hashes": [
"sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937",
"sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"
],
"markers": "sys_platform != 'win32'",
"version": "==4.8.0"
},
"pickleshare": {
"hashes": [
"sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca",
"sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"
],
"version": "==0.7.5"
},
"platformdirs": {
"hashes": [
"sha256:cf8ee52a3afdb965072dcc652433e0c7e3e40cf5ea1477cd4b3b1d2eb75495b3",
"sha256:e9d171d00af68be50e9202731309c4e658fd8bc76f55c11c7dd760d023bda68e"
],
"markers": "python_version >= '3.7'",
"version": "==3.11.0"
},
"pluggy": {
"hashes": [
"sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12",
"sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"
],
"markers": "python_version >= '3.8'",
"version": "==1.3.0"
},
"prompt-toolkit": {
"hashes": [
"sha256:04505ade687dc26dc4284b1ad19a83be2f2afe83e7a828ace0c72f3a1df72aac",
"sha256:9dffbe1d8acf91e3de75f3b544e4842382fc06c6babe903ac9acb74dc6e08d88"
],
"markers": "python_full_version >= '3.7.0'",
"version": "==3.0.39"
},
"ptyprocess": {
"hashes": [
"sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35",
"sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"
],
"version": "==0.7.0"
},
"pure-eval": {
"hashes": [
"sha256:01eaab343580944bc56080ebe0a674b39ec44a945e6d09ba7db3cb8cec289350",
"sha256:2b45320af6dfaa1750f543d714b6d1c520a1688dec6fd24d339063ce0aaa9ac3"
],
"version": "==0.2.2"
},
"pycparser": { "pycparser": {
"hashes": [ "hashes": [
"sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9", "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9",
@ -381,6 +588,14 @@
"markers": "python_version >= '3.7'", "markers": "python_version >= '3.7'",
"version": "==2.10.1" "version": "==2.10.1"
}, },
"pygments": {
"hashes": [
"sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692",
"sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"
],
"markers": "python_version >= '3.7'",
"version": "==2.16.1"
},
"pyopenssl": { "pyopenssl": {
"hashes": [ "hashes": [
"sha256:24f0dc5227396b3e831f4c7f602b950a5e9833d292c8e4a2e06b709292806ae2", "sha256:24f0dc5227396b3e831f4c7f602b950a5e9833d292c8e4a2e06b709292806ae2",
@ -390,6 +605,13 @@
"markers": "python_version >= '3.6'", "markers": "python_version >= '3.6'",
"version": "==23.2.0" "version": "==23.2.0"
}, },
"pytest": {
"hashes": [
"sha256:1d881c6124e08ff0a1bb75ba3ec0bfd8b5354a01c194ddd5a0a870a48d99b002",
"sha256:a766259cfab564a2ad52cb1aae1b881a75c3eb7e34ca3779697c23ed47c47069"
],
"version": "==7.4.2"
},
"pyyaml": { "pyyaml": {
"hashes": [ "hashes": [
"sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5", "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5",
@ -447,6 +669,14 @@
"markers": "python_version >= '3.6'", "markers": "python_version >= '3.6'",
"version": "==6.0.1" "version": "==6.0.1"
}, },
"six": {
"hashes": [
"sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926",
"sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"
],
"markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
"version": "==1.16.0"
},
"sniffio": { "sniffio": {
"hashes": [ "hashes": [
"sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101", "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101",
@ -465,6 +695,7 @@
"sha256:05b971ab1ac2994a14c56b35eaaa91f86ba080e9ad481b20d99d77f381bb6258", "sha256:05b971ab1ac2994a14c56b35eaaa91f86ba080e9ad481b20d99d77f381bb6258",
"sha256:141675dae56522126986fa4ca713739d00ed3a6f08f3c2eb92c39c6dfec463ce", "sha256:141675dae56522126986fa4ca713739d00ed3a6f08f3c2eb92c39c6dfec463ce",
"sha256:1e7dc99b23e33c71d720c4ae37ebb095bebebbd31a24b7d99dfc4753d2803ede", "sha256:1e7dc99b23e33c71d720c4ae37ebb095bebebbd31a24b7d99dfc4753d2803ede",
"sha256:2a1f7ffac934bc0ea717fa1596f938483fb8c402233f9b26679b4f7b38d6ab6e",
"sha256:2e617727fe4091cedb3e4409b39368f424934c7faa78171749f704b49b4bb4ce", "sha256:2e617727fe4091cedb3e4409b39368f424934c7faa78171749f704b49b4bb4ce",
"sha256:3cf229704074bce31f7f47d12883afee3b0a02bb233a0ba45ddbfe542939cca4", "sha256:3cf229704074bce31f7f47d12883afee3b0a02bb233a0ba45ddbfe542939cca4",
"sha256:3eb7c03fe1cd3255811cd4e74db1ab8dca22074d50cd8937edf4ef62d758cdf4", "sha256:3eb7c03fe1cd3255811cd4e74db1ab8dca22074d50cd8937edf4ef62d758cdf4",
@ -474,6 +705,9 @@
"sha256:4615623a490e46be85fbaa6335f35cf80e61df0783240afe7d4f544778c315a9", "sha256:4615623a490e46be85fbaa6335f35cf80e61df0783240afe7d4f544778c315a9",
"sha256:50a69067af86ec7f11a8e50ba85544657b1477aabf64fa447fd3736b5a0a4f67", "sha256:50a69067af86ec7f11a8e50ba85544657b1477aabf64fa447fd3736b5a0a4f67",
"sha256:513fd5b6513d37e985eb5b7ed89da5fd9e72354e3523980ef00d439bc549c9e9", "sha256:513fd5b6513d37e985eb5b7ed89da5fd9e72354e3523980ef00d439bc549c9e9",
"sha256:526b869a0f4f000d8d8ee3409d0becca30ae73f494cbb48801da0129601f72c6",
"sha256:56628ca27aa17b5890391ded4e385bf0480209726f198799b7e980c6bd473bd7",
"sha256:632784f7a6f12cfa0e84bf2a5003b07660addccf5563c132cd23b7cc1d7371a9",
"sha256:6ff3dc2f60dbf82c9e599c2915db1526d65415be323464f84de8db3e361ba5b9", "sha256:6ff3dc2f60dbf82c9e599c2915db1526d65415be323464f84de8db3e361ba5b9",
"sha256:73c079e21d10ff2be54a4699f55865d4b275fd6c8bd5d90c5b1ef78ae0197301", "sha256:73c079e21d10ff2be54a4699f55865d4b275fd6c8bd5d90c5b1ef78ae0197301",
"sha256:7614f1eab4336df7dd6bee05bc974f2b02c38d3d0c78060c5faa4cd1ca2af3b8", "sha256:7614f1eab4336df7dd6bee05bc974f2b02c38d3d0c78060c5faa4cd1ca2af3b8",
@ -490,14 +724,18 @@
"sha256:b69f1f754d92eb1cc6b50938359dead36b96a1dcf11a8670bff65fd9b21a4b09", "sha256:b69f1f754d92eb1cc6b50938359dead36b96a1dcf11a8670bff65fd9b21a4b09",
"sha256:b977bfce15afa53d9cf6a632482d7968477625f030d86a109f7bdfe8ce3c064a", "sha256:b977bfce15afa53d9cf6a632482d7968477625f030d86a109f7bdfe8ce3c064a",
"sha256:bf8eebccc66829010f06fbd2b80095d7872991bfe8415098b9fe47deaaa58063", "sha256:bf8eebccc66829010f06fbd2b80095d7872991bfe8415098b9fe47deaaa58063",
"sha256:bfece2f7cec502ec5f759bbc09ce711445372deeac3628f6fa1c16b7fb45b682",
"sha256:c111cd40910ffcb615b33605fc8f8e22146aeb7933d06569ac90f219818345ef", "sha256:c111cd40910ffcb615b33605fc8f8e22146aeb7933d06569ac90f219818345ef",
"sha256:c2d494b6a2a2d05fb99f01b84cc9af9f5f93bf3e1e5dbdafe4bed0c2823584c1", "sha256:c2d494b6a2a2d05fb99f01b84cc9af9f5f93bf3e1e5dbdafe4bed0c2823584c1",
"sha256:c9cba4e7369de663611ce7460a34be48e999e0bbb1feb9130070f0685e9a6b66", "sha256:c9cba4e7369de663611ce7460a34be48e999e0bbb1feb9130070f0685e9a6b66",
"sha256:cca720d05389ab1a5877ff05af96551e58ba65e8dc65582d849ac83ddde3e231", "sha256:cca720d05389ab1a5877ff05af96551e58ba65e8dc65582d849ac83ddde3e231",
"sha256:ccb99c3138c9bde118b51a289d90096a3791658da9aea1754667302ed6564f6e", "sha256:ccb99c3138c9bde118b51a289d90096a3791658da9aea1754667302ed6564f6e",
"sha256:d59cb9e20d79686aa473e0302e4a82882d7118744d30bb1dfb62d3c47141b3ec", "sha256:d59cb9e20d79686aa473e0302e4a82882d7118744d30bb1dfb62d3c47141b3ec",
"sha256:db726be58837fe5ac39859e0fa40baafe54c6d54c02aba1d47d25536170b690f",
"sha256:e36339a68126ffb708dc6d1948161cea2a9e85d7d7b0c54f6999853d70d44430", "sha256:e36339a68126ffb708dc6d1948161cea2a9e85d7d7b0c54f6999853d70d44430",
"sha256:e7421c1bfdbb7214313919472307be650bd45c4dc2fcb317d64d078993de045b",
"sha256:ea7da25ee458d8f404b93eb073116156fd7d8c2a776d8311534851f28277b4ce", "sha256:ea7da25ee458d8f404b93eb073116156fd7d8c2a776d8311534851f28277b4ce",
"sha256:f6f7276cf26145a888f2182a98f204541b519d9ea358a65d82095d9c9e22f917",
"sha256:f9fefd6298433b6e9188252f3bff53b9ff0443c8fde27298b8a2b19f6617eeb9", "sha256:f9fefd6298433b6e9188252f3bff53b9ff0443c8fde27298b8a2b19f6617eeb9",
"sha256:fb87f763b5d04a82ae84ccff25554ffd903baafba6698e18ebaf32561f2fe4aa", "sha256:fb87f763b5d04a82ae84ccff25554ffd903baafba6698e18ebaf32561f2fe4aa",
"sha256:fc6b15465fabccc94bf7e38777d665b6a4f95efd1725049d6184b3a39fd54880" "sha256:fc6b15465fabccc94bf7e38777d665b6a4f95efd1725049d6184b3a39fd54880"
@ -514,6 +752,13 @@
"markers": "python_version >= '3.6'", "markers": "python_version >= '3.6'",
"version": "==0.41.1" "version": "==0.41.1"
}, },
"stack-data": {
"hashes": [
"sha256:836a778de4fec4dcd1dcd89ed8abff8a221f58308462e1c4aa2a3cf30148f0b9",
"sha256:d5558e0c25a4cb0853cddad3d77da9891a08cb85dd9f9f91b9f8cd66e511e695"
],
"version": "==0.6.3"
},
"starlette": { "starlette": {
"hashes": [ "hashes": [
"sha256:6a6b0d042acb8d469a01eba54e9cda6cbd24ac602c4cd016723117d6a7e73b75", "sha256:6a6b0d042acb8d469a01eba54e9cda6cbd24ac602c4cd016723117d6a7e73b75",
@ -522,6 +767,14 @@
"markers": "python_version >= '3.7'", "markers": "python_version >= '3.7'",
"version": "==0.27.0" "version": "==0.27.0"
}, },
"traitlets": {
"hashes": [
"sha256:7564b5bf8d38c40fa45498072bf4dc5e8346eb087bbf1e2ae2d8774f6a0f078e",
"sha256:98277f247f18b2c5cabaf4af369187754f4fb0e85911d473f72329db8a7f4fae"
],
"markers": "python_version >= '3.8'",
"version": "==5.11.2"
},
"typing-extensions": { "typing-extensions": {
"hashes": [ "hashes": [
"sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0", "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0",
@ -538,6 +791,13 @@
"index": "pypi", "index": "pypi",
"markers": "python_version >= '3.8'", "markers": "python_version >= '3.8'",
"version": "==0.23.2" "version": "==0.23.2"
},
"wcwidth": {
"hashes": [
"sha256:77f719e01648ed600dfa5402c347481c0992263b81a027344f3e1ba25493a704",
"sha256:8705c569999ffbb4f6a87c6d1b80f324bd6db952f5eb0b95bc07517f4c1813d4"
],
"version": "==0.2.8"
} }
}, },
"develop": {} "develop": {}

View file

@ -12,10 +12,12 @@ Features :
- [x] Multiple paths per websites can be tested ; - [x] Multiple paths per websites can be tested ;
- [x] Handle jobs failures on the clients - [x] Handle jobs failures on the clients
- [x] Exposes an HTTP API that can be consumed by other systems ; - [x] Exposes an HTTP API that can be consumed by other systems ;
- [x] Checks can be distributed on the network thanks to a job queue ;
- [x] Change the naming and use service/agent.
- [x] Packaging (and `argos agent` / `argos service` commands)
- [ ] Local task for database cleanup (to run periodically)
- [ ] Handles multiple alerting backends (email, sms, gotify) ; - [ ] Handles multiple alerting backends (email, sms, gotify) ;
- [ ] Exposes a simple read-only website. - [ ] Exposes a simple read-only website.
- [ ] Packaging (and argos-client / argos-server commands)
- [ ] Checks can be distributed on the network thanks to a job queue ;
Implemented checks : Implemented checks :
@ -46,14 +48,12 @@ pipenv run uvicorn argos.server:app --reload
The server will read a `config.yaml` file at startup, and will populate the tasks specified in it. See the configuration section below for more information on how to configure the checks you want to run. The server will read a `config.yaml` file at startup, and will populate the tasks specified in it. See the configuration section below for more information on how to configure the checks you want to run.
And here is how to run the client: And here is how to run the agent:
```bash ```bash
pipenv run python -m argos.client.cli --server http://localhost:8000 pipenv run argos-agent --server http://localhost:8000
``` ```
NB: `argos-server` and `argos-client` commands will be provided in the future.
## Configuration ## Configuration
Here is a simple configuration file: Here is a simple configuration file:
@ -112,10 +112,9 @@ websites:
- AND selected_by not defined. - AND selected_by not defined.
- Mark these tasks as selected by the current worker, on the current date. - Mark these tasks as selected by the current worker, on the current date.
### From time to time: ### From time to time (cleanup):
- Check for stalled tasks (datetime.now() - selected_at) > MAX_WORKER_TIME. Remove the lock. - Check for stalled tasks (datetime.now() - selected_at) > MAX_WORKER_TIME. Remove the lock.
### On the worker side ### On the worker side
Hey, I'm XX, give me some work. 1. Hey, I'm XX, give me some work.
<Service answers> 2. <Service answers> OK, this is done, here are the results for Task<id>: response.
OK, this is done, here are the results for Task<id>: response.

View file

@ -7,15 +7,14 @@ from argos import logging
from argos.logging import logger from argos.logging import logger
from argos.checks import CheckNotFound, get_check_by_name from argos.checks import CheckNotFound, get_check_by_name
from argos.schemas import Task, ClientResult, SerializableException from argos.schemas import Task, AgentResult, SerializableException
async def complete_task(http_client: httpx.AsyncClient, task: dict) -> dict:
async def complete_task(client: httpx.AsyncClient, task: dict) -> dict:
try: try:
task = Task(**task) task = Task(**task)
check_class = get_check_by_name(task.check) check_class = get_check_by_name(task.check)
check = check_class(client, task) check = check_class(http_client, task)
result = await check.run() result = await check.run()
status = result.status status = result.status
context = result.context context = result.context
@ -23,13 +22,16 @@ async def complete_task(client: httpx.AsyncClient, task: dict) -> dict:
except Exception as e: except Exception as e:
status = "error" status = "error"
context = SerializableException.from_exception(e) context = SerializableException.from_exception(e)
logger.error(f"An exception occured when trying to complete {task} : {e}") msg = f"An exception occured when running {task}. {e.__class__.__name__} : {e}"
return ClientResult(task=task.id, status=status, context=context) logger.error(msg)
return AgentResult(task_id=task.id, status=status, context=context)
async def post_results(client: httpx.AsyncClient, server: str, results: List[ClientResult]): async def post_results(
http_client: httpx.AsyncClient, server: str, results: List[AgentResult]
):
data = [r.model_dump() for r in results] data = [r.model_dump() for r in results]
response = await client.post(f"{server}/results", json=data) response = await http_client.post(f"{server}/results", json=data)
if response.status_code == httpx.codes.CREATED: if response.status_code == httpx.codes.CREATED:
logger.error(f"Successfully posted results {response.json()}") logger.error(f"Successfully posted results {response.json()}")
@ -40,9 +42,9 @@ async def post_results(client: httpx.AsyncClient, server: str, results: List[Cli
async def run(server: str, max_tasks: int): async def run(server: str, max_tasks: int):
tasks = [] tasks = []
async with httpx.AsyncClient() as client: async with httpx.AsyncClient() as http_client:
# Fetch the list of tasks # Fetch the list of tasks
response = await client.get(f"{server}/tasks") response = await http_client.get(f"{server}/tasks")
if response.status_code == httpx.codes.OK: if response.status_code == httpx.codes.OK:
# XXX Maybe we want to group the tests by URL ? (to issue one request per URL) # XXX Maybe we want to group the tests by URL ? (to issue one request per URL)
@ -50,13 +52,13 @@ async def run(server: str, max_tasks: int):
logger.info(f"Received {len(data)} tasks from the server") logger.info(f"Received {len(data)} tasks from the server")
for task in data: for task in data:
tasks.append(complete_task(client, task)) tasks.append(complete_task(http_client, task))
# Run up to max_tasks concurrent tasks # Run up to max_tasks concurrent tasks
results = await asyncio.gather(*tasks) results = await asyncio.gather(*tasks)
# Post the results # Post the results
await post_results(client, server, results) await post_results(http_client, server, results)
else: else:
logger.error(f"Failed to fetch tasks: {response.read()}") logger.error(f"Failed to fetch tasks: {response.read()}")

View file

@ -6,13 +6,28 @@ import httpx
from argos.schemas import Task from argos.schemas import Task
class Status:
ON_CHECK = "on-check"
SUCCESS = "success"
FAILURE = "failure"
# XXX We could name this Result, but is it could overlap with schemas.Result. # XXX We could name this Result, but is it could overlap with schemas.Result.
# Need to better define the naming around this. # Need to better define the naming around this.
# Status can be "Success" / "Failure" / "Error" or "On Check"
@dataclass @dataclass
class Response: class Response:
status: str status: str
context: dict context: dict
@classmethod
def new(cls, status, **kwargs):
if type(status) == bool:
status = Status.SUCCESS if status else Status.FAILURE
return cls(status=status, context=kwargs)
class BaseExpectedValue(BaseModel): class BaseExpectedValue(BaseModel):
expected: str expected: str
@ -34,6 +49,11 @@ class CheckNotFound(Exception):
pass pass
class InvalidResponse(Exception):
def __str__(self):
return "The provided response is missing a 'status' key."
class BaseCheck: class BaseCheck:
config: str config: str
expected_cls: Type[BaseExpectedValue] = None expected_cls: Type[BaseExpectedValue] = None
@ -55,17 +75,19 @@ class BaseCheck:
raise CheckNotFound(name) raise CheckNotFound(name)
return check return check
def response(self, passed, **kwargs) -> Response: def __init__(self, http_client: httpx.AsyncClient, task: Task):
status = "success" if passed else "failure" self.http_client = http_client
return Response(status, kwargs) self.task = task
@property @property
def expected(self): def expected(self):
return self.expected_cls(expected=self.task.expected).get_converted() return self.expected_cls(expected=self.task.expected).get_converted()
def __init__(self, client: httpx.AsyncClient, task: Task): def response(self, **kwargs):
self.client = client if "status" not in kwargs:
self.task = task raise InvalidResponse(kwargs)
status = kwargs.pop("status")
return Response.new(status, **kwargs)
def get_check_by_name(name): def get_check_by_name(name):

View file

@ -1,5 +1,11 @@
from argos.logging import logger from argos.logging import logger
from argos.checks.base import BaseCheck, Response, ExpectedIntValue, ExpectedStringValue from argos.checks.base import (
BaseCheck,
Response,
Status,
ExpectedIntValue,
ExpectedStringValue,
)
import ssl import ssl
import time import time
@ -14,10 +20,10 @@ class HTTPStatus(BaseCheck):
async def run(self) -> dict: async def run(self) -> dict:
# XXX Get the method from the task # XXX Get the method from the task
task = self.task task = self.task
response = await self.client.request(method="get", url=task.url) response = await self.http_client.request(method="get", url=task.url)
logger.error(f"{response.status_code=}, {self.expected=}")
return self.response( return self.response(
response.status_code == self.expected, status=response.status_code == self.expected,
expected=self.expected, expected=self.expected,
retrieved=response.status_code, retrieved=response.status_code,
) )
@ -28,8 +34,8 @@ class HTTPBodyContains(BaseCheck):
expected_cls = ExpectedStringValue expected_cls = ExpectedStringValue
async def run(self) -> dict: async def run(self) -> dict:
response = await self.client.request(method="get", url=self.task.url) response = await self.http_client.request(method="get", url=self.task.url)
return self.response(self.expected in response.text) return self.response(status=self.expected in response.text)
class SSLCertificateExpiration(BaseCheck): class SSLCertificateExpiration(BaseCheck):
@ -37,23 +43,25 @@ class SSLCertificateExpiration(BaseCheck):
expected_cls = ExpectedStringValue expected_cls = ExpectedStringValue
async def run(self): async def run(self):
response = await self.client.get(self.task.url) """Returns the number of days in which the certificate will expire."""
response = await self.http_client.get(self.task.url)
if response.is_error: if response.is_error:
raise raise
conn = self.client.transport.get_connection_info(self.task.url) network_stream = ssl_object = response.extensions["network_stream"]
cert = ssl.DER_cert_to_PEM_cert(conn.raw_certificates[0]) ssl_obj = network_stream.get_extra_info("ssl_object")
cert = ssl_obj.getpeercert()
x509 = crypto.load_certificate(crypto.FILETYPE_PEM, cert) not_after = datetime.strptime(cert.get("notAfter"), "%b %d %H:%M:%S %Y %Z")
not_after = x509.get_notAfter().decode("utf-8") expires_in = (not_after - datetime.now()).days
not_after = datetime.strptime(not_after, "%Y%m%d%H%M%SZ")
now = time.time() return self.response(status=Status.ON_CHECK, expires_in=expires_in)
if time.mktime(not_after.timetuple()) < now:
expired = True
else:
expired = False
return self.response( @classmethod
expired == False, expected=now, retrieved=not_after.timetuple() async def finalize(cls, config, callback, expires_in):
) thresholds = config.ssl.thresholds
thresholds.sort(reverse=True)
for days, severity in thresholds:
if expires_in > days:
callback(severity)
break

View file

@ -1,10 +1,11 @@
import logging import logging
LOG_LEVELS = ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'] LOG_LEVELS = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
# XXX We probably want different loggers for client and server. # XXX We probably want different loggers for client and server.
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
# XXX Does not work ? # XXX Does not work ?
def set_log_level(log_level): def set_log_level(log_level):
level = getattr(logging, log_level.upper(), None) level = getattr(logging, log_level.upper(), None)

View file

@ -1,7 +1,7 @@
from pydantic import BaseModel from pydantic import BaseModel
from enum import StrEnum from enum import StrEnum
from typing import List, Optional, Tuple from typing import List, Optional, Tuple, Literal
from typing import Dict, Union, List from typing import Dict, Union, List
@ -9,19 +9,33 @@ import yaml
from pydantic import BaseModel, Field, HttpUrl, validator from pydantic import BaseModel, Field, HttpUrl, validator
from datetime import datetime from datetime import datetime
# from argos.checks import get_names as get_check_names # from argos.checks import get_names as get_check_names
# XXX Find a way to check without having cirular imports # XXX Find a way to check without having cirular imports
# This file contains the pydantic schemas. For the database models, check in argos.model. # This file contains the pydantic schemas. For the database models, check in argos.model.
class Thresholds(BaseModel): Severity = Literal["warning", "error", "critical"]
critical: str = Field(alias="critical")
warning: str = Field(alias="warning")
class SSL(BaseModel): class SSL(BaseModel):
thresholds: Thresholds thresholds: List[Tuple[int, Severity]]
@validator("thresholds", each_item=True, pre=True)
def parse_threshold(cls, value):
for duration_str, severity in value.items():
num = int("".join(filter(str.isdigit, duration_str)))
if "d" in duration_str:
num = num
elif "w" in duration_str:
num = num * 7
elif "m" in duration_str:
num = num * 30
else:
raise ValueError("Invalid duration value")
# Return here because it's one-item dicts.
return (num, severity)
class WebsiteCheck(BaseModel): class WebsiteCheck(BaseModel):
@ -49,7 +63,7 @@ class WebsiteCheck(BaseModel):
class WebsitePath(BaseModel): class WebsitePath(BaseModel):
path: str path: str
checks: List[WebsiteCheck] checks: List[Dict[str, str | dict | int]]
class Website(BaseModel): class Website(BaseModel):
@ -84,7 +98,6 @@ def validate_config(config: dict):
return Config(**config) return Config(**config)
# Method to load YAML file
def from_yaml(file_name): def from_yaml(file_name):
parsed = load_yaml(file_name) parsed = load_yaml(file_name)
return validate_config(parsed) return validate_config(parsed)

View file

@ -5,18 +5,25 @@ import traceback
# XXX Refactor using SQLModel to avoid duplication of model data # XXX Refactor using SQLModel to avoid duplication of model data
class Task(BaseModel): class Task(BaseModel):
id : int id: int
url: str url: str
domain: str domain: str
check: str check: str
expected: str expected: str
selected_at: datetime | None selected_at: datetime | None
selected_by : str | None selected_by: str | None
class Config: class Config:
from_attributes = True from_attributes = True
def __str__(self):
id = self.id
url = self.url
check = self.check
return f"Task ({id}): {url} - {check}"
class SerializableException(BaseModel): class SerializableException(BaseModel):
error_message: str error_message: str
@ -28,10 +35,12 @@ class SerializableException(BaseModel):
return SerializableException( return SerializableException(
error_message=str(e), error_message=str(e),
error_type=str(type(e).__name__), error_type=str(type(e).__name__),
error_details=traceback.format_exc() error_details=traceback.format_exc(),
) )
class ClientResult(BaseModel):
task : int class AgentResult(BaseModel):
status : Literal["success", "failure", "error"] task_id: int
# The checked status means that the service needs to finish the checks to determine the severity.
status: Literal["success", "failure", "error", "on-check"]
context: dict | SerializableException context: dict | SerializableException

View file

@ -1,11 +1,13 @@
from fastapi import Depends, FastAPI, HTTPException, Request from fastapi import Depends, FastAPI, HTTPException, Request
from sqlalchemy.orm import Session from sqlalchemy.orm import Session
from pydantic import BaseModel from pydantic import BaseModel, ValidationError
import sys
from argos.server import queries, models from argos.server import queries, models
from argos.schemas import ClientResult, Task from argos.schemas import AgentResult, Task
from argos.schemas.config import from_yaml as get_schemas_from_yaml from argos.schemas.config import from_yaml as get_schemas_from_yaml
from argos.server.database import SessionLocal, engine from argos.server.database import SessionLocal, engine
from argos.checks import get_check_by_name
from argos.logging import logger from argos.logging import logger
from typing import List from typing import List
@ -25,7 +27,14 @@ def get_db():
@app.on_event("startup") @app.on_event("startup")
async def read_config_and_populate_db(): async def read_config_and_populate_db():
# XXX Get filename from environment. # XXX Get filename from environment.
config = get_schemas_from_yaml("config.yaml") try:
config = get_schemas_from_yaml("config.yaml")
app.config = config
except ValidationError as e:
logger.error(f"Errors where found while reading configuration:")
for error in e.errors():
logger.error(f"{error['loc']} is {error['type']}")
sys.exit(1)
db = SessionLocal() db = SessionLocal()
try: try:
@ -37,14 +46,43 @@ async def read_config_and_populate_db():
# XXX Get the default limit from the config # XXX Get the default limit from the config
@app.get("/tasks", response_model=list[Task]) @app.get("/tasks", response_model=list[Task])
async def read_tasks(request: Request, limit: int = 20, db: Session = Depends(get_db)): async def read_tasks(request: Request, limit: int = 20, db: Session = Depends(get_db)):
tasks = await queries.list_tasks(db, client_id=request.client.host, limit=limit) # XXX Let the agents specifify their names (and use hostnames)
tasks = await queries.list_tasks(db, agent_id=request.client.host, limit=limit)
return tasks return tasks
@app.post("/results", status_code=201) @app.post("/results", status_code=201)
async def create_result(results: List[ClientResult], db: Session = Depends(get_db)): async def create_result(results: List[AgentResult], db: Session = Depends(get_db)):
"""Get the results from the agents and store them locally.
- Finalize the checks (some checks need the server to do some part of the validation,
for instance because they need access to the configuration)
- If it's an error, determine its severity ;
- Trigger the reporting calls
"""
db_results = [] db_results = []
for client_result in results: for agent_result in results:
db_results.append(await queries.create_result(db, client_result)) result = await queries.create_result(db, agent_result)
# XXX Maybe offload this to a queue.
# XXX Use a schema for the on-check value.
if result.status == "on-check":
task = await queries.get_task(db, agent_result.task_id)
if not task:
logger.error(f"Unable to find task {agent_result.task_id}")
else:
check = task.get_check()
callback = logger.error
await check.finalize(app.config, callback=callback, **result.context)
db_results.append(result)
db.commit() db.commit()
return {"result_ids": [r.id for r in db_results]} return {"result_ids": [r.id for r in db_results]}
@app.get("/stats")
async def get_stats(db: Session = Depends(get_db)):
return {
"tasks_count": await queries.count_tasks(db),
"results_count": await queries.count_results(db),
}

View file

@ -1,12 +1,22 @@
from typing import List, Literal from typing import List, Literal
from sqlalchemy import Boolean, Column, ForeignKey, Integer, String, JSON, DateTime, Enum from sqlalchemy import (
Boolean,
Column,
ForeignKey,
Integer,
String,
JSON,
DateTime,
Enum,
)
from sqlalchemy.orm import relationship, Mapped, mapped_column, DeclarativeBase from sqlalchemy.orm import relationship, Mapped, mapped_column, DeclarativeBase
from sqlalchemy_utils import ChoiceType from sqlalchemy_utils import ChoiceType
from sqlalchemy.orm import mapped_column from sqlalchemy.orm import mapped_column, relationship
from datetime import datetime from datetime import datetime
from argos.schemas import WebsiteCheck from argos.schemas import WebsiteCheck
from argos.checks import get_check_by_name
class Base(DeclarativeBase): class Base(DeclarativeBase):
@ -34,12 +44,27 @@ class Task(Base):
selected_by: Mapped[str] = mapped_column(nullable=True) selected_by: Mapped[str] = mapped_column(nullable=True)
selected_at: Mapped[datetime] = mapped_column(nullable=True) selected_at: Mapped[datetime] = mapped_column(nullable=True)
results: Mapped[List["Result"]] = relationship(back_populates="task")
def __str__(self):
return f"DB Task {self.url} - {self.check} - {self.expected}"
def get_check(self):
"""Returns a check instance for this specific task"""
return get_check_by_name(self.check)
class Result(Base): class Result(Base):
__tablename__ = "results" __tablename__ = "results"
id: Mapped[int] = mapped_column(primary_key=True) id: Mapped[int] = mapped_column(primary_key=True)
task_id: Mapped[int] = mapped_column(ForeignKey("tasks.id"))
task: Mapped["Task"] = relationship(back_populates="results")
submitted_at: Mapped[datetime] = mapped_column() submitted_at: Mapped[datetime] = mapped_column()
status: Mapped[Literal["success", "failure", "error"]] =\ status: Mapped[Literal["success", "failure", "error", "on-check"]] = mapped_column(
mapped_column(Enum("success", "failure", "error")) Enum("success", "failure", "error", "on-check")
)
context: Mapped[dict] = mapped_column() context: Mapped[dict] = mapped_column()
def __str__(self):
return f"DB Result {self.id} - {self.status} - {self.context}"

View file

@ -9,28 +9,41 @@ from urllib.parse import urljoin
from datetime import datetime from datetime import datetime
async def list_tasks(db: Session, client_id: str, limit: int = 100): async def list_tasks(db: Session, agent_id: str, limit: int = 100):
"""List tasks and mark them as selected""" """List tasks and mark them as selected"""
tasks = db.query(Task).where(Task.selected_by == None).limit(limit).all() tasks = db.query(Task).where(Task.selected_by == None).limit(limit).all()
now = datetime.now() now = datetime.now()
# XXX: Deactivated for now, as it simplifies testing. # XXX: Deactivated for now, as it simplifies testing.
# for task in tasks: # for task in tasks:
# task.selected_at = now # task.selected_at = now
# task.selected_by = client_id # task.selected_by = agent_id
# db.commit() # db.commit()
return tasks return tasks
async def create_result(db: Session, client_result: schemas.ClientResult): async def get_task(db: Session, id):
return db.query(Task).get(id)
async def create_result(db: Session, agent_result: schemas.AgentResult):
result = Result( result = Result(
submitted_at=datetime.now(), submitted_at=datetime.now(),
status=client_result.status, status=agent_result.status,
context=client_result.context, context=agent_result.context,
task_id=agent_result.task_id,
) )
db.add(result) db.add(result)
return result return result
async def count_tasks(db: Session):
return db.query(Task).count()
async def count_results(db: Session):
return db.query(Result).count()
async def update_from_config(db: Session, config: schemas.Config): async def update_from_config(db: Session, config: schemas.Config):
for website in config.websites: for website in config.websites:
domain = str(website.domain) domain = str(website.domain)
@ -51,7 +64,7 @@ async def update_from_config(db: Session, config: schemas.Config):
task = Task( task = Task(
domain=domain, url=url, check=check_key, expected=expected domain=domain, url=url, check=check_key, expected=expected
) )
logger.debug(f"Adding a new task in the db: {task=}") logger.debug(f"Adding a new task in the db: {task}")
db.add(task) db.add(task)
else: else:
logger.debug( logger.debug(

View file

@ -1,89 +1,87 @@
general: general:
frequency: 4h # Run checks every 4 hours. frequency: 4h # Run checks every 4 hours.
alerts: alerts:
error: error:
- local - local
warning: warning:
- local - local
alert: alert:
- local - local
service: service:
port: 8888 port: 8888
# Can be generated using `openssl rand -base64 32`. # Can be generated using `openssl rand -base64 32`.
secrets: secrets:
- "O4kt8Max9/k0EmHaEJ0CGGYbBNFmK8kOZNIoUk3Kjwc" - "O4kt8Max9/k0EmHaEJ0CGGYbBNFmK8kOZNIoUk3Kjwc"
- "x1T1VZR51pxrv5pQUyzooMG4pMUvHNMhA5y/3cUsYVs=" - "x1T1VZR51pxrv5pQUyzooMG4pMUvHNMhA5y/3cUsYVs="
ssl: ssl:
thresholds: thresholds:
critical: "1d" - "1d": critical
warning: "10d" "5d": warning
websites: websites:
- domain: "https://mypads.framapad.org" - domain: "https://mypads.framapad.org"
paths: paths:
- path: "/mypads/" - path: "/mypads/"
checks: checks:
- status-is: 200 - status-is: 200
- body-contains: '<div id= "mypads"></div>' - body-contains: '<div id= "mypads"></div>'
# le check du certificat devrait plutôt être au niveau - ssl-certificate-expiration: "on-check"
# de domain et paths, AMHA - path: "/admin/"
- ssl-certificate-expiration: "on-check" checks:
- path: "/admin/" - status-is: 401
checks: - domain: "https://munin.framasoft.org"
- status-is: 401 paths:
- domain: "https://munin.framasoft.org" - path: "/"
paths: checks:
- path: "/" - status-is: 301
checks: - path: "/munin/"
- status-is: 301 checks:
- path: "/munin/" - status-is: 401
checks: - domain: "https://framagenda.org"
- status-is: 401 paths:
- domain: "https://framagenda.org" - path: "/status.php"
paths: checks:
- path: "/status.php" - status-is: 200
checks: # Là, idéalement, il faudrait un json-contains,
- status-is: 200 # qui serait une table de hachage
# Là, idéalement, il faudrait un json-contains, - body-contains: '"maintenance":false'
# qui serait une table de hachage - ssl-certificate-expiration: "on-check"
- body-contains: '"maintenance":false' - path: "/"
- ssl-certificate-expiration: "on-check" checks:
- path: "/" - status-is: 302
checks: - path: "/login"
- status-is: 302 checks:
- path: "/login" - status-is: 200
checks: - domain: "https://framadrive.org"
- status-is: 200 paths:
- domain: "https://framadrive.org" - path: "/status.php"
paths: checks:
- path: "/status.php" - status-is: 200
checks: - body-contains: '"maintenance":false'
- status-is: 200 - ssl-certificate-expiration: "on-check"
- body-contains: '"maintenance":false' - path: "/"
- ssl-certificate-expiration: "on-check" checks:
- path: "/" - status-is: 302
checks: - path: "/login"
- status-is: 302 checks:
- path: "/login" - status-is: 200
checks: - domain: "https://cloud.framabook.org"
- status-is: 200 paths:
- domain: "https://cloud.framabook.org" - path: "/status.php"
paths: checks:
- path: "/status.php" - status-is: 200
checks: - body-contains: '"maintenance":false'
- status-is: 200 - ssl-certificate-expiration: "on-check"
- body-contains: '"maintenance":false' - path: "/"
- ssl-certificate-expiration: "on-check" checks:
- path: "/" - status-is: 302
checks: - path: "/login"
- status-is: 302 checks:
- path: "/login" - status-is: 200
checks: - domain: "https://framasoft.org"
- status-is: 200 paths:
- domain: "https://framasoft.org" - path: "/"
path: checks:
- path: "/" - status-is: 200
checks: - ssl-certificate-expiration: "on-check"
- status-is: 200
- ssl-certificate-expiration: "on-check"

56
pyproject.toml Normal file
View file

@ -0,0 +1,56 @@
[build-system]
requires = ["setuptools", "setuptools-scm"]
build-backend = "setuptools.build_meta"
[project]
name = "argos"
version = "0.1.0"
description = "Distributed supervision tool for HTTP."
authors = [
{ name = "Alexis Métaireau", email = "alexis@notmyidea.org" },
]
readme = "README.md"
classifiers = [
"Programming Language :: Python :: 3.11",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
]
dependencies = [
"click>=8.1,<9",
"fastapi>=0.103,<0.104",
"httpx>=0.25,<1",
"pydantic>=2.4,<3",
"pyyaml>=6.0,<7",
"sqlalchemy[asyncio]>=2.0,<3",
"sqlalchemy-utils>=0.41,<1",
"uvicorn>=0.23,<1",
]
[project.urls]
homepage = "https://framagit.org/framasoft/framaspace/argos"
repository = "https://framagit.org/framasoft/framaspace/argos"
"Funding" = "https://framasoft.org/en/#support"
"Tracker" = "https://framagit.org/framasoft/framaspace/argos/-/issues"
[tool.setuptools]
packages = ["argos"]
[project.optional-dependencies]
dev = [
"black==23.3.0",
"isort==5.11.5",
"pytest>=6.2.5",
]
[project.scripts]
argos-agent = "argos.agent.cli:main"
[tool.pytest.ini_options]
minversion = "6.0"
addopts = "-ra -q"
testpaths = [
"tests",
"argos"
]
pythonpath = "."

20
tests/test_checks_base.py Normal file
View file

@ -0,0 +1,20 @@
import pytest
from argos.checks.base import Response, Status
def test_response_failure_with_context():
    """A falsy status with extra keyword args yields FAILURE and keeps the context."""
    response = Response.new(False, some="context", another=True)
    expected_context = {"some": "context", "another": True}
    assert response.status == Status.FAILURE
    assert response.context == expected_context
def test_response_success():
    """A truthy boolean status maps to Status.SUCCESS."""
    assert Response.new(True).status == Status.SUCCESS
def test_response_on_check_with_context():
    """Passing a Status member keeps it as-is; ON_CHECK also compares equal
    to its string value "on-check", and the context dict is preserved."""
    response = Response.new(Status.ON_CHECK, expires_in=3)
    assert response.status == "on-check"
    assert response.status == Status.ON_CHECK
    assert response.context == {"expires_in": 3}

View file

@ -0,0 +1,16 @@
import pytest
from argos.schemas.config import SSL
def test_ssl_duration_parsing():
    """Duration strings ("2d", "3w") are parsed into (days, severity) tuples,
    and unknown severities are rejected by the model's validation."""
    ssl_object = SSL(**{"thresholds": [{"2d": "warning"}, {"3w": "error"}]})
    assert len(ssl_object.thresholds) == 2
    assert ssl_object.thresholds == [(2, "warning"), (21, "error")]

    # Severities outside the allowed Literal must raise a validation error.
    with pytest.raises(ValueError):
        SSL(**{"thresholds": [{"1d": "caution"}, {"1w": "danger"}]})