Compare commits


10 commits

Author SHA1 Message Date
9a9ac2736a
Merge 27d9396d12 into d722800a4b 2024-11-20 17:58:10 +01:00
Alexis Métaireau
27d9396d12
docs: Update the release instructions
This commit makes changes to the release instructions, preferring bash
examples when that's possible. As a result, the QA.md and RELEASE.md
files have been separated, and a new `generate-release-tasks.py` script
makes its first appearance.
2024-11-20 17:54:45 +01:00
Alexis Métaireau
f456e644d5
Update QA script to support Fedora 41 2024-11-20 17:45:43 +01:00
Alexis Métaireau
9bc443888e
docs: Add a step to download tesseract data in the RELEASE notes 2024-11-20 17:45:43 +01:00
Alexis Métaireau
d722800a4b
Update Lock file
2024-11-20 17:42:59 +01:00
Alexis Métaireau
4cfc633cdb
Add a script to help generate release notes from merged pull requests 2024-11-20 17:42:59 +01:00
Alexis Métaireau
944d58dd8d
CI: Update container scanning to account for the arm64 architecture. 2024-11-20 17:12:20 +01:00
Alexis Métaireau
f3806b96af
Reapply "Disable gVisor's DirectFS feature.""
This reverts commit 68f8338d20.

Fixes #982
2024-11-20 16:41:56 +01:00
Alexis Métaireau
c4bb7c28c8
Unpin gVisor, now that upstream is able to support Linux Yama Mode 2
Fixes #298
2024-11-20 16:41:55 +01:00
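
For context on this commit: Yama is the Linux security module that restricts `ptrace()` attachments, and in mode 2 only processes with `CAP_SYS_PTRACE` may attach, which older gVisor releases did not support. A minimal, illustrative check of the host's setting (not part of this change set):

```python
# Illustrative only: read the Yama ptrace scope (0-3). The commit above
# refers to mode 2, where only CAP_SYS_PTRACE processes may use ptrace.
from pathlib import Path

def yama_ptrace_scope() -> int:
    return int(Path("/proc/sys/kernel/yama/ptrace_scope").read_text())

if __name__ == "__main__":
    print(f"kernel.yama.ptrace_scope = {yama_ptrace_scope()}")
```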
Alexis Métaireau
630083bdea
CI: Only run the CI on pull requests, and on the "main" branch
Previously, the actions were duplicated, because when developing we often
create feature branches and open pull requests.

This new setup requires us to open pull requests to trigger the CI.
2024-11-20 15:56:28 +01:00
15 changed files with 414 additions and 72 deletions

View file

@@ -1,6 +1,10 @@
 name: Build dev environments
 on:
+  pull_request:
   push:
+    branches:
+      - main
+      - "test/**"
   schedule:
     - cron: "0 0 * * *" # Run every day at 00:00 UTC.

View file

@@ -1,6 +1,6 @@
 name: Check branch conformity
 on:
-  push:
+  pull_request:
 jobs:
   prevent-fixup-commits:

View file

@@ -1,8 +1,10 @@
 name: Tests
 on:
-  push:
   pull_request:
-    branches: [main]
+  push:
+    branches:
+      - main
+      - "test/**"
   schedule:
     - cron: "2 0 * * *" # Run every day at 02:00 UTC.
   workflow_dispatch:
@@ -91,7 +93,8 @@ jobs:
   windows:
     runs-on: windows-latest
-    needs: download-tessdata
+    needs:
+      - download-tessdata
     env:
       DUMMY_CONVERSION: 1
     steps:
@@ -121,7 +124,8 @@ jobs:
   macOS:
     name: "macOS (${{ matrix.arch }})"
     runs-on: ${{ matrix.runner }}
-    needs: download-tessdata
+    needs:
+      - download-tessdata
     strategy:
       matrix:
         include:
@@ -149,9 +153,10 @@ jobs:
       run: poetry run make test
   build-deb:
+    needs:
+      - build-container-image
     name: "build-deb (${{ matrix.distro }} ${{ matrix.version }})"
     runs-on: ubuntu-latest
-    needs: build-container-image
     strategy:
       matrix:
         include:
@@ -219,7 +224,8 @@ jobs:
   install-deb:
     name: "install-deb (${{ matrix.distro }} ${{ matrix.version }})"
     runs-on: ubuntu-latest
-    needs: build-deb
+    needs:
+      - build-deb
     strategy:
       matrix:
         include:
@@ -273,7 +279,8 @@ jobs:
   build-install-rpm:
     name: "build-install-rpm (${{ matrix.distro }} ${{matrix.version}})"
     runs-on: ubuntu-latest
-    needs: build-container-image
+    needs:
+      - build-container-image
     strategy:
       matrix:
         distro: ["fedora"]

View file

@@ -1,8 +1,9 @@
 name: Scan latest app and container
 on:
   push:
+    branches:
+      - main
   pull_request:
-    branches: [ main ]
   schedule:
     - cron: '0 0 * * *' # Run every day at 00:00 UTC.
   workflow_dispatch:

View file

@@ -6,14 +6,21 @@
 jobs:
   security-scan-container:
-    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        include:
+          - runs-on: ubuntu-latest
+            arch: i686
+          - runs-on: macos-latest
+            arch: arm64
+    runs-on: ${{ matrix.runs-on }}
     steps:
       - name: Checkout
         uses: actions/checkout@v4
       - name: Download container image for the latest release and load it
         run: |
           VERSION=$(curl https://api.github.com/repos/freedomofpress/dangerzone/releases/latest | jq -r '.tag_name')
-          CONTAINER_FILENAME=container-${VERSION:1}-i686.tar.gz
+          CONTAINER_FILENAME=container-${VERSION:1}-${{ matrix.arch }}.tar.gz
           wget https://github.com/freedomofpress/dangerzone/releases/download/${VERSION}/${CONTAINER_FILENAME} -O ${CONTAINER_FILENAME}
           docker load -i ${CONTAINER_FILENAME}
           # NOTE: Scan first without failing, else we won't be able to read the scan
@@ -30,7 +37,7 @@ jobs:
         uses: github/codeql-action/upload-sarif@v3
         with:
           sarif_file: ${{ steps.scan_container.outputs.sarif }}
-          category: container
+          category: container-${{ matrix.arch }}
       - name: Inspect container scan report
         run: cat ${{ steps.scan_container.outputs.sarif }}
       - name: Scan container image

View file

@@ -260,16 +260,11 @@ The following instructions require typing commands in a terminal in dom0.
    ```
    qvm-create --class AppVM --label red --template fedora-40-dz dz
-   qvm-volume resize dz:private $(numfmt --from=auto 5Gi)
    ```

    > :bulb: Alternatively, you can use a different app qube for Dangerzone
    > development. In that case, replace `dz` with the qube of your choice in the
    > steps below.
-   >
-   > In the commands above, we also resize the private volume of the `dz` qube
-   > to 5GiB, since the Tesseract data that will be downloaded in the next steps
-   > take a bit of space.

 4. Add an RPC policy (`/etc/qubes/policy.d/50-dangerzone.policy`) that will
    allow launching a disposable qube (`dz-dvm`) when Dangerzone converts a

View file

@@ -7,6 +7,11 @@ since 0.4.1, and this project adheres to [Semantic Versioning](https://semver.or
 ## [Unreleased](https://github.com/freedomofpress/dangerzone/compare/v0.8.0...HEAD)

+### Added
+
+- Disable gVisor's DirectFS feature ([#226](https://github.com/freedomofpress/dangerzone/issues/226)).
+  Thanks [EtiennePerot](https://github.com/EtiennePerot) for the contribution.
+
 ## [0.8.0](https://github.com/freedomofpress/dangerzone/compare/v0.8.0...0.7.1)

 ### Added

View file

@@ -74,9 +74,7 @@ FROM alpine:latest
 RUN apk --no-cache -U upgrade && \
     apk --no-cache add python3

-# Temporarily pin gVisor to the latest working version (release-20240826.0).
-# See: https://github.com/freedomofpress/dangerzone/issues/928
-RUN GVISOR_URL="https://storage.googleapis.com/gvisor/releases/release/20240826/$(uname -m)"; \
+RUN GVISOR_URL="https://storage.googleapis.com/gvisor/releases/release/latest/$(uname -m)"; \
     wget "${GVISOR_URL}/runsc" "${GVISOR_URL}/runsc.sha512" && \
     sha512sum -c runsc.sha512 && \
     rm -f runsc.sha512 && \

View file

@@ -6,7 +6,7 @@ Take potentially dangerous PDFs, office documents, or images and convert them to
 | ![Settings](./assets/screenshot1.png) | ![Converting](./assets/screenshot2.png)
 |--|--|

-Dangerzone works like this: You give it a document that you don't know if you can trust (for example, an email attachment). Inside of a sandbox, Dangerzone converts the document to a PDF (if it isn't already one), and then converts the PDF into raw pixel data: a huge list of RGB color values for each page. Then, outside of the sandbox, Dangerzone takes this pixel data and converts it back into a PDF.
+Dangerzone works like this: You give it a document that you don't know if you can trust (for example, an email attachment). Inside of a sandbox, Dangerzone converts the document to a PDF (if it isn't already one), and then converts the PDF into raw pixel data: a huge list of RGB color values for each page. Then, in a separate sandbox, Dangerzone takes this pixel data and converts it back into a PDF.

 _Read more about Dangerzone in the [official site](https://dangerzone.rocks/about/)._
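
The pixels-to-PDF step described in that README paragraph is the core of the design: only raw RGB values cross the sandbox boundary. A minimal sketch of the reconstruction step, using Pillow as an illustrative stand-in (Dangerzone's actual implementation differs):

```python
# Illustrative sketch, not Dangerzone's actual code: rebuild a multi-page
# PDF from raw RGB pixel buffers, one (width, height, bytes) tuple per page.
from PIL import Image

def pixels_to_pdf(pages, output_path):
    images = [
        Image.frombytes("RGB", (width, height), rgb)
        for width, height, rgb in pages
    ]
    # Pillow writes a multi-page PDF when save_all is set and the
    # remaining pages are passed via append_images.
    images[0].save(output_path, "PDF", save_all=True, append_images=images[1:])
```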

View file

@@ -142,6 +142,9 @@ runsc_argv = [
     "--rootless=true",
     "--network=none",
     "--root=/home/dangerzone/.containers",
+    # Disable DirectFS to make the seccomp filter even stricter,
+    # at some performance cost.
+    "--directfs=false",
 ]
 if os.environ.get("RUNSC_DEBUG"):
     runsc_argv += ["--debug=true", "--alsologtostderr=true"]

View file

@@ -0,0 +1,254 @@
#!/usr/bin/env python3
import argparse
import asyncio
import re
import sys
from datetime import datetime
from typing import Dict, List, Optional, Tuple

import httpx

REPOSITORY = "https://github.com/freedomofpress/dangerzone/"
TEMPLATE = "- {title} ([#{number}]({url}))"


def parse_version(version: str) -> Tuple[int, int]:
    """Extract major.minor from version string, ignoring patch"""
    match = re.match(r"v?(\d+)\.(\d+)", version)
    if not match:
        raise ValueError(f"Invalid version format: {version}")
    return (int(match.group(1)), int(match.group(2)))


async def get_last_minor_release(
    client: httpx.AsyncClient, owner: str, repo: str
) -> Optional[str]:
    """Get the latest minor release date (ignoring patches)"""
    response = await client.get(f"https://api.github.com/repos/{owner}/{repo}/releases")
    response.raise_for_status()
    releases = response.json()
    if not releases:
        return None

    # Get the latest minor version by comparing major.minor numbers
    current_version = parse_version(releases[0]["tag_name"])
    latest_date = None
    for release in releases:
        try:
            version = parse_version(release["tag_name"])
            if version < current_version:
                latest_date = release["published_at"]
                break
        except ValueError:
            continue
    return latest_date


async def get_issue_details(
    client: httpx.AsyncClient, owner: str, repo: str, issue_number: int
) -> Optional[dict]:
    """Get issue title and number if it exists"""
    response = await client.get(
        f"https://api.github.com/repos/{owner}/{repo}/issues/{issue_number}"
    )
    if response.is_success:
        data = response.json()
        return {
            "title": data["title"],
            "number": data["number"],
            "url": data["html_url"],
        }
    return None


def extract_issue_number(pr_body: Optional[str]) -> Optional[int]:
    """Extract issue number from PR body looking for common formats like 'Fixes #123' or 'Closes #123'"""
    if not pr_body:
        return None

    patterns = [
        r"(?:closes|fixes|resolves)\s*#(\d+)",
        r"(?:close|fix|resolve)\s*#(\d+)",
    ]

    for pattern in patterns:
        match = re.search(pattern, pr_body.lower())
        if match:
            return int(match.group(1))
    return None


async def verify_commit_in_master(
    client: httpx.AsyncClient, owner: str, repo: str, commit_id: str
) -> bool:
    """Verify if a commit exists in master"""
    response = await client.get(
        f"https://api.github.com/repos/{owner}/{repo}/commits/{commit_id}"
    )
    return response.is_success and response.json().get("commit") is not None


async def process_issue_events(
    client: httpx.AsyncClient, owner: str, repo: str, issue: Dict
) -> Optional[Dict]:
    """Process events for a single issue"""
    events_response = await client.get(f"{issue['url']}/events")
    if not events_response.is_success:
        return None

    for event in events_response.json():
        if event["event"] == "closed" and event.get("commit_id"):
            if await verify_commit_in_master(client, owner, repo, event["commit_id"]):
                return {
                    "title": issue["title"],
                    "number": issue["number"],
                    "url": issue["html_url"],
                }
    return None


async def get_closed_issues(
    client: httpx.AsyncClient, owner: str, repo: str, since: str
) -> List[Dict]:
    """Get issues closed by commits to master since the given date"""
    response = await client.get(
        f"https://api.github.com/repos/{owner}/{repo}/issues",
        params={
            "state": "closed",
            "sort": "updated",
            "direction": "desc",
            "since": since,
            "per_page": 100,
        },
    )
    response.raise_for_status()

    tasks = []
    since_date = datetime.strptime(since, "%Y-%m-%dT%H:%M:%SZ")
    for issue in response.json():
        if "pull_request" in issue:
            continue
        closed_at = datetime.strptime(issue["closed_at"], "%Y-%m-%dT%H:%M:%SZ")
        if closed_at <= since_date:
            continue
        tasks.append(process_issue_events(client, owner, repo, issue))

    results = await asyncio.gather(*tasks)
    return [r for r in results if r is not None]


async def process_pull_request(
    client: httpx.AsyncClient,
    owner: str,
    repo: str,
    pr: Dict,
    closed_issues: List[Dict],
) -> Optional[str]:
    """Process a single pull request"""
    issue_number = extract_issue_number(pr.get("body"))
    if issue_number:
        issue = await get_issue_details(client, owner, repo, issue_number)
        if issue:
            if not any(i["number"] == issue["number"] for i in closed_issues):
                return TEMPLATE.format(**issue)
            return None
    return TEMPLATE.format(title=pr["title"], number=pr["number"], url=pr["html_url"])


async def get_changes_since_last_release(
    owner: str, repo: str, token: Optional[str] = None
) -> List[str]:
    headers = {
        "Accept": "application/vnd.github.v3+json",
    }
    if token:
        headers["Authorization"] = f"token {token}"
    else:
        print(
            "Warning: No token provided. API rate limiting may occur.", file=sys.stderr
        )

    async with httpx.AsyncClient(headers=headers, timeout=30.0) as client:
        # Get the date of last minor release
        since = await get_last_minor_release(client, owner, repo)
        if not since:
            return []

        changes = []

        # Get issues closed by commits to master
        closed_issues = await get_closed_issues(client, owner, repo, since)
        changes.extend([TEMPLATE.format(**issue) for issue in closed_issues])

        # Get merged PRs
        response = await client.get(
            f"https://api.github.com/repos/{owner}/{repo}/pulls",
            params={
                "state": "closed",
                "sort": "updated",
                "direction": "desc",
                "per_page": 100,
            },
        )
        response.raise_for_status()

        # Process PRs in parallel
        pr_tasks = []
        for pr in response.json():
            if not pr["merged_at"]:
                continue
            if since and pr["merged_at"] <= since:
                break
            pr_tasks.append(
                process_pull_request(client, owner, repo, pr, closed_issues)
            )

        pr_results = await asyncio.gather(*pr_tasks)
        changes.extend([r for r in pr_results if r is not None])

    return changes


async def main_async():
    parser = argparse.ArgumentParser(description="Generate release notes from GitHub")
    parser.add_argument("--token", "-t", help="the file path to the GitHub API token")
    args = parser.parse_args()

    token = None
    if args.token:
        with open(args.token) as f:
            token = f.read().strip()

    try:
        url_path = REPOSITORY.rstrip("/").split("github.com/")[1]
        owner, repo = url_path.split("/")[-2:]
    except (ValueError, IndexError):
        print("Error: Invalid GitHub URL", file=sys.stderr)
        sys.exit(1)

    try:
        notes = await get_changes_since_last_release(owner, repo, token)
        print("\n".join(notes))
    except httpx.HTTPError as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(1)
    except Exception as e:
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(1)


def main():
    asyncio.run(main_async())


if __name__ == "__main__":
    main()

View file

@@ -5,48 +5,54 @@ import subprocess
 RELEASE_FILE = "RELEASE.md"
 QA_FILE = "QA.md"

+
 def git_root():
     """Get the root directory of the Git repo."""
     # FIXME: Use a Git Python binding for this.
     # FIXME: Make this work if called outside the repo.
-    path = subprocess.run(
-        ["git", "rev-parse", "--show-toplevel"],
-        check=True,
-        stdout=subprocess.PIPE,
-    ).stdout.decode().strip("\n")
+    path = (
+        subprocess.run(
+            ["git", "rev-parse", "--show-toplevel"],
+            check=True,
+            stdout=subprocess.PIPE,
+        )
+        .stdout.decode()
+        .strip("\n")
+    )
     return pathlib.Path(path)

+
 def extract_checkboxes(filename):
     headers = []
     result = []

-    with open(filename, 'r') as f:
+    with open(filename, "r") as f:
         lines = f.readlines()

     current_level = 0
     for line in lines:
         line = line.rstrip()

         # If it's a header, store it
-        if line.startswith('#'):
+        if line.startswith("#"):
             # Count number of # to determine header level
-            level = len(line) - len(line.lstrip('#'))
+            level = len(line) - len(line.lstrip("#"))
             if level < current_level or not current_level:
-                headers.extend(['', line, ''])
+                headers.extend(["", line, ""])
                 current_level = level
             elif level > current_level:
                 continue
             else:
-                headers = ['', line, '']
+                headers = ["", line, ""]

         # If it's a checkbox
-        elif '- [ ]' in line or '- [x]' in line or '- [X]' in line:
+        elif "- [ ]" in line or "- [x]" in line or "- [X]" in line:
             # Print the last header if we haven't already
             if headers:
                 result.extend(headers)
                 headers = []
                 current_level = 0

             # If this is the "Do the QA tasks" line, recursively get QA tasks
             if "Do the QA tasks" in line:
                 result.append(line)
@@ -54,7 +60,8 @@ def extract_checkboxes(filename):
             result.append(qa_tasks)
         else:
             result.append(line)

-    return '\n'.join(result)
+    return "\n".join(result)

-if __name__ == '__main__':
-    print(extract_checkboxes(git_root() / RELEASE_FILE))
+
+if __name__ == "__main__":
+    print(extract_checkboxes(git_root() / RELEASE_FILE))

View file

@@ -3,20 +3,14 @@
 import abc
 import argparse
 import difflib
-import json
 import logging
 import re
 import selectors
 import subprocess
 import sys
-import urllib.request

 logger = logging.getLogger(__name__)

-PYTHON_VERSION_STR = "3.12"
-PYTHON_VERSION = [int(num) for num in PYTHON_VERSION_STR.split(".")]
-EOL_PYTHON_URL = "https://endoflife.date/api/python.json"
-
 CONTENT_QA = r"""## QA

 To ensure that new releases do not introduce regressions, and support existing
@@ -827,26 +821,6 @@ class QAWindows(QABase):
         while msvcrt.kbhit():
             msvcrt.getch()

-    @QABase.task(f"Install the latest version of Python {PYTHON_VERSION_STR}", ref=REF_BUILD)
-    def install_python(self):
-        cur_version = list(sys.version_info[:3])
-        logger.info("Getting latest Python release")
-        with urllib.request.urlopen(EOL_PYTHON_URL) as f:
-            resp = f.read()
-            releases = json.loads(resp)
-            for release in releases:
-                if release["cycle"] == PYTHON_VERSION_STR:
-                    latest_version = [int(num) for num in release["latest"].split(".")]
-                    if latest_version > cur_version:
-                        self.prompt(f"You need to install the latest Python version ({release['latest']})")
-                    elif latest_version == cur_version:
-                        logger.info(f"Verified that the latest Python version ({release['latest']}) is installed")
-                        return
-        logger.error("Could not verify that the latest Python version is installed")
-
     @QABase.task("Install and Run Docker Desktop", ref=REF_BUILD)
     def install_docker(self):
         logger.info("Checking if Docker Desktop is installed and running")
@@ -861,16 +835,12 @@
     )
     def install_poetry(self):
         self.run("python", "-m", "pip", "install", "poetry")
-        self.run("poetry", "install", "--sync")
+        self.run("poetry", "install")

     @QABase.task("Build Dangerzone container image", ref=REF_BUILD, auto=True)
     def build_image(self):
         self.run("python", r".\install\common\build-image.py")

-    @QABase.task("Download Tesseract data", ref=REF_BUILD, auto=True)
-    def download_tessdata(self):
-        self.run("python", r".\install\common\download-tessdata.py")
-
     @QABase.task("Run tests", ref="REF_BUILD", auto=True)
     def run_tests(self):
         # NOTE: Windows does not have Makefile by default.
@@ -887,11 +857,9 @@ class QAWindows(QABase):
         return "windows"

     def start(self):
-        self.install_python()
         self.install_docker()
         self.install_poetry()
         self.build_image()
-        self.download_tessdata()
         self.run_tests()
         self.build_dangerzone_exe()

poetry.lock generated
View file

@@ -11,6 +11,28 @@ files = [
     {file = "altgraph-0.17.4.tar.gz", hash = "sha256:1b5afbb98f6c4dcadb2e2ae6ab9fa994bbb8c1d75f4fa96d340f9437ae454406"},
 ]

+[[package]]
+name = "anyio"
+version = "4.6.2.post1"
+description = "High level compatibility layer for multiple asynchronous event loop implementations"
+optional = false
+python-versions = ">=3.9"
+files = [
+    {file = "anyio-4.6.2.post1-py3-none-any.whl", hash = "sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d"},
+    {file = "anyio-4.6.2.post1.tar.gz", hash = "sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c"},
+]
+
+[package.dependencies]
+exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""}
+idna = ">=2.8"
+sniffio = ">=1.1"
+typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""}
+
+[package.extras]
+doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"]
+test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"]
+trio = ["trio (>=0.26.1)"]
+
 [[package]]
 name = "appdirs"
 version = "1.4.4"
@@ -404,6 +426,63 @@ files = [
 [package.extras]
 test = ["pytest (>=6)"]

+[[package]]
+name = "h11"
+version = "0.14.0"
+description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"},
+    {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"},
+]
+
+[[package]]
+name = "httpcore"
+version = "1.0.7"
+description = "A minimal low-level HTTP client."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"},
+    {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"},
+]
+
+[package.dependencies]
+certifi = "*"
+h11 = ">=0.13,<0.15"
+
+[package.extras]
+asyncio = ["anyio (>=4.0,<5.0)"]
+http2 = ["h2 (>=3,<5)"]
+socks = ["socksio (==1.*)"]
+trio = ["trio (>=0.22.0,<1.0)"]
+
+[[package]]
+name = "httpx"
+version = "0.27.2"
+description = "The next generation HTTP client."
+optional = false
+python-versions = ">=3.8"
+files = [
+    {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"},
+    {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"},
+]
+
+[package.dependencies]
+anyio = "*"
+certifi = "*"
+httpcore = "==1.*"
+idna = "*"
+sniffio = "*"
+
+[package.extras]
+brotli = ["brotli", "brotlicffi"]
+cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"]
+http2 = ["h2 (>=3,<5)"]
+socks = ["socksio (==1.*)"]
+zstd = ["zstandard (>=0.18.0)"]
+
 [[package]]
 name = "idna"
 version = "3.10"
@@ -991,6 +1070,17 @@ files = [
     {file = "shiboken6-6.8.0.2-cp39-abi3-win_amd64.whl", hash = "sha256:b11e750e696bb565d897e0f5836710edfb86bd355f87b09988bd31b2aad404d3"},
 ]

+[[package]]
+name = "sniffio"
+version = "1.3.1"
+description = "Sniff out which async library your code is running under"
+optional = false
+python-versions = ">=3.7"
+files = [
+    {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"},
+    {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"},
+]
+
 [[package]]
 name = "strip-ansi"
 version = "0.1.1"
@@ -1099,4 +1189,4 @@ type = ["pytest-mypy"]
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.9,<3.13"
-content-hash = "44356eaeeb3dbe23b922413ee68f8c73c6ce8ebbdee8a80afecb410e060d0382"
+content-hash = "5d1ff28aa04c3a814280e55c0b2a307efe5ca953cd4cb281056c35fd2e53fdf0"

View file

@@ -57,6 +57,9 @@ pytest-rerunfailures = "^14.0"
 [tool.poetry.group.container.dependencies]
 pymupdf = "1.24.11" # Last version to support python 3.8 (needed for Ubuntu Focal support)

+[tool.poetry.group.dev.dependencies]
+httpx = "^0.27.2"
+
 [tool.isort]
 profile = "black"
 skip_gitignore = true