Mirror of https://github.com/freedomofpress/dangerzone.git (synced 2025-05-06 13:31:50 +02:00)

Compare commits: 15 commits, 1ed1dcee6b ... 396d53b130
Commits in this range: 396d53b130, 2f29095b31, 52eae7cd00, ece58cba06, eec4e6a5c3, 02261b112e, f400205c74, 5b1fe4d7ad, 53214d33d8, 7f7fe43711, f31fbfefc6, 96e64deae7, 60df4f7e35, 9fa3c80404, 4bf7f9cbb4
20 changed files with 1145 additions and 377 deletions
.github/workflows/build.yml (vendored, 2 changes)

```diff
@@ -74,6 +74,8 @@ jobs:
     runs-on: ubuntu-24.04
     steps:
       - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0

       - name: Get current date
         id: date
```
.github/workflows/ci.yml (vendored, 2 changes)

```diff
@@ -48,6 +48,8 @@ jobs:
     runs-on: ubuntu-24.04
     steps:
       - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0

       - name: Get current date
         id: date
```
.github/workflows/scan.yml (vendored, 2 changes)

```diff
@@ -14,6 +14,8 @@ jobs:
     steps:
       - name: Checkout
         uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
       - name: Install container build dependencies
         run: sudo apt install pipx && pipx install poetry
       - name: Build container image
```
.gitignore (vendored, 1 change)

```diff
@@ -149,3 +149,4 @@ share/container.tar
 share/container.tar.gz
 share/image-id.txt
 container/container-pip-requirements.txt
+.doit.db.db
```
```diff
@@ -16,6 +16,10 @@ since 0.4.1, and this project adheres to [Semantic Versioning](https://semver.or

 - Platform support: Drop support for Fedora 39, since it's end-of-life ([#999](https://github.com/freedomofpress/dangerzone/pull/999))

+### Development changes
+
+- Automate a large portion of our release tasks with `doit` ([#1016](https://github.com/freedomofpress/dangerzone/issues/1016))
+
 ## [0.8.0](https://github.com/freedomofpress/dangerzone/compare/v0.8.0...0.7.1)

 ### Added
```
QA.md (new file, 199 lines)

@@ -0,0 +1,199 @@
## QA

To ensure that new releases do not introduce regressions, and support existing
and newer platforms, we have to test that the produced packages work as expected.

Check the following:

- [ ] Make sure that the tip of the `main` branch passes the CI tests.
- [ ] Make sure that the Apple account has a valid application password and has
      agreed to the latest Apple terms (see [macOS release](#macos-release)
      section).

Because it is repetitive, we wrote a script to help with the QA.
It can run the tasks for you, pausing when it needs manual intervention.

You can run it with a command like:

```bash
poetry run ./dev_scripts/qa.py {distro}-{version}
```
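For instance, to run the QA sequence for one of the supported platforms (Fedora 41 here, which `dev_scripts/qa.py` defines in this changeset):

```bash
poetry run ./dev_scripts/qa.py fedora-41
```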
### The checklist

- [ ] Create a test build in Windows and make sure it works:
  - [ ] Check if the suggested Python version is still supported.
  - [ ] Create a new development environment with Poetry.
  - [ ] Build the container image and ensure the development environment uses
        the new image.
  - [ ] Download the OCR language data using `./install/common/download-tessdata.py`
  - [ ] Run the Dangerzone tests.
  - [ ] Build and run the Dangerzone .exe
  - [ ] Test some QA scenarios (see [Scenarios](#Scenarios) below).
- [ ] Create a test build in macOS (Intel CPU) and make sure it works:
  - [ ] Check if the suggested Python version is still supported.
  - [ ] Create a new development environment with Poetry.
  - [ ] Build the container image and ensure the development environment uses
        the new image.
  - [ ] Download the OCR language data using `./install/common/download-tessdata.py`
  - [ ] Run the Dangerzone tests.
  - [ ] Create and run an app bundle.
  - [ ] Test some QA scenarios (see [Scenarios](#Scenarios) below).
- [ ] Create a test build in macOS (M1/2 CPU) and make sure it works:
  - [ ] Check if the suggested Python version is still supported.
  - [ ] Create a new development environment with Poetry.
  - [ ] Build the container image and ensure the development environment uses
        the new image.
  - [ ] Download the OCR language data using `./install/common/download-tessdata.py`
  - [ ] Run the Dangerzone tests.
  - [ ] Create and run an app bundle.
  - [ ] Test some QA scenarios (see [Scenarios](#Scenarios) below).
- [ ] Create a test build in the most recent Ubuntu LTS platform (Ubuntu 24.04
      as of writing this) and make sure it works:
  - [ ] Create a new development environment with Poetry.
  - [ ] Build the container image and ensure the development environment uses
        the new image.
  - [ ] Download the OCR language data using `./install/common/download-tessdata.py`
  - [ ] Run the Dangerzone tests.
  - [ ] Create a .deb package and install it system-wide.
  - [ ] Test some QA scenarios (see [Scenarios](#Scenarios) below).
- [ ] Create a test build in the most recent Fedora platform (Fedora 41 as of
      writing this) and make sure it works:
  - [ ] Create a new development environment with Poetry.
  - [ ] Build the container image and ensure the development environment uses
        the new image.
  - [ ] Download the OCR language data using `./install/common/download-tessdata.py`
  - [ ] Run the Dangerzone tests.
  - [ ] Create an .rpm package and install it system-wide.
  - [ ] Test some QA scenarios (see [Scenarios](#Scenarios) below).
- [ ] Create a test build in the most recent Qubes Fedora template (Fedora 40 as
      of writing this) and make sure it works:
  - [ ] Create a new development environment with Poetry.
  - [ ] Run the Dangerzone tests.
  - [ ] Create a Qubes .rpm package and install it system-wide.
  - [ ] Ensure that the Dangerzone application appears in the "Applications"
        tab.
  - [ ] Test some QA scenarios (see [Scenarios](#Scenarios) below) and make sure
        they spawn disposable qubes.

### Scenarios

#### 1. Dangerzone correctly identifies that Docker/Podman is not installed

_(Only for MacOS / Windows)_

Temporarily hide the Docker/Podman binaries, e.g., rename the `docker` /
`podman` binaries to something else. Then run Dangerzone. Dangerzone should
prompt the user to install Docker/Podman.
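One quick way to do this on macOS, as a sketch (the path is the usual Docker Desktop CLI symlink location; adjust it to wherever `docker` lives on your test machine):

```bash
# Hide the docker CLI so Dangerzone cannot find it, run Dangerzone, then restore it
sudo mv /usr/local/bin/docker /usr/local/bin/docker.bak
# ... run Dangerzone and check the prompt ...
sudo mv /usr/local/bin/docker.bak /usr/local/bin/docker
```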

#### 2. Dangerzone correctly identifies that Docker is not running

_(Only for MacOS / Windows)_

Stop the Docker Desktop application. Then run Dangerzone. Dangerzone should
prompt the user to start Docker Desktop.

#### 3. Updating Dangerzone handles external state correctly.

_(Applies to Windows/MacOS)_

Install the previous version of Dangerzone, downloaded from the website.

Open the Dangerzone application and enable some non-default settings.
**If there are new settings, make sure to change those as well**.

Close the Dangerzone application and get the container image for that
version. For example:

```
$ docker images dangerzone.rocks/dangerzone
REPOSITORY                   TAG      IMAGE ID     CREATED   SIZE
dangerzone.rocks/dangerzone  latest   <image ID>   <date>    <size>
dangerzone.rocks/dangerzone  <tag>    <image ID>   <date>    <size>
```

Then run the version under QA and ensure that the settings remain changed.

Afterwards, check that the new Docker image was installed by running the same
command and seeing the following differences:

```
$ docker images dangerzone.rocks/dangerzone
REPOSITORY                   TAG          IMAGE ID         CREATED       SIZE
dangerzone.rocks/dangerzone  latest       <different ID>   <newer date>  <different size>
dangerzone.rocks/dangerzone  <other tag>  <different ID>   <newer date>  <different size>
```

#### 4. Dangerzone successfully installs the container image

_(Only for Linux)_

Remove the Dangerzone container image from Docker/Podman. Then run Dangerzone.
Dangerzone should install the container image successfully.
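To remove the image before this test, something like the following works (a sketch; substitute `docker` for `podman` depending on the runtime under test):

```bash
# Delete every locally stored Dangerzone image tag
podman rmi --force $(podman images -q dangerzone.rocks/dangerzone)
```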

#### 5. Dangerzone retains the settings of previous runs

Run Dangerzone and make some changes in the settings (e.g., change the OCR
language, toggle whether to open the document after conversion, etc.). Restart
Dangerzone. Dangerzone should show the settings that the user chose.

#### 6. Dangerzone reports failed conversions

Run Dangerzone and convert the `tests/test_docs/sample_bad_pdf.pdf` document.
Dangerzone should fail gracefully, by reporting that the operation failed, and
showing the following error message:

> The document format is not supported

#### 7. Dangerzone succeeds in converting multiple documents

Run Dangerzone against a list of documents, and tick all options. Ensure that:

* Conversions take place sequentially.
* Attempting to close the window while converting asks the user if they want to
  abort the conversions.
* Conversions are completed successfully.
* Conversions show individual progress in real-time (double-check for Qubes).
* _(Only for Linux)_ The resulting files open with the PDF viewer of our choice.
* OCR seems to have detected characters in the PDF files.
* The resulting files have been saved with the proper suffix, in the proper
  location.
* The original files have been saved in the `unsafe/` directory.

#### 8. Dangerzone is able to handle drag-n-drop

Run Dangerzone against a set of documents that you drag-n-drop. Files should be
added and conversion should run without issue.

> [!TIP]
> On our end-user container environments for Linux, we can start a file manager
> with `thunar &`.

#### 9. Dangerzone CLI succeeds in converting multiple documents

_(Only for Windows and Linux)_

Run Dangerzone CLI against a list of documents. Ensure that conversions happen
sequentially, are completed successfully, and we see their progress.
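An illustrative invocation (the exact entry point depends on how Dangerzone is installed; `dangerzone-cli` is the name of the packaged CLI, and the `--ocr-lang` flag is assumed to be available):

```bash
dangerzone-cli --ocr-lang eng document1.pdf document2.docx document3.xlsx
```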

#### 10. Dangerzone can open a document for conversion via right-click -> "Open With"

_(Only for Windows, MacOS and Qubes)_

Go to a directory with office documents, right-click on one, and click on "Open
With". We should be able to open the file with Dangerzone, and then convert it.

#### 11. Dangerzone shows helpful errors for setup issues on Qubes

_(Only for Qubes)_

Check what errors Dangerzone throws in the following scenarios. The errors
should point the user to the Qubes notifications in the top-right corner:

1. The `dz-dvm` template does not exist. We can trigger this scenario by
   temporarily renaming this template.
2. The Dangerzone RPC policy does not exist. We can trigger this scenario by
   temporarily renaming the `dz.Convert` policy.
3. The `dz-dvm` disposable Qube cannot start due to insufficient resources. We
   can trigger this scenario by temporarily increasing the minimum required RAM
   of the `dz-dvm` template to more than the available amount.
RELEASE.md (414 changes)
````diff
@@ -1,12 +1,17 @@
 # Release instructions

-This section documents the release process. Unless you're a dangerzone developer making a release, you'll probably never need to follow it.
+This section documents how we currently release Dangerzone for the different distributions we support.

 ## Pre-release

-Before making a release, all of these should be complete:
+Here is a list of tasks that should be done before issuing the release:

-- [ ] Copy the checkboxes from these instructions onto a new issue and call it **QA and Release version \<VERSION\>**
+- [ ] Create a new issue named **QA and Release for version \<VERSION\>**, to track the general progress.
+      You can generate its content with:
+
+      ```
+      poetry run ./dev_scripts/generate-release-tasks.py
+      ```
 - [ ] [Add new Linux platforms and remove obsolete ones](https://github.com/freedomofpress/dangerzone/blob/main/RELEASE.md#add-new-platforms-and-remove-obsolete-ones)
 - [ ] Bump the Python dependencies using `poetry lock`
 - [ ] Update `version` in `pyproject.toml`
````
@@ -15,6 +20,8 @@ Before making a release, all of these should be complete:

- [ ] Bump the Debian version by adding a new changelog entry in `debian/changelog`
- [ ] Update screenshot in `README.md`, if necessary
- [ ] CHANGELOG.md should be updated to include a list of all major changes since the last release
- [ ] A draft release should be created. Copy the release notes text from the template at [`docs/templates/release-notes`](https://github.com/freedomofpress/dangerzone/tree/main/docs/templates/)
- [ ] Do the QA tasks

## Add new Linux platforms and remove obsolete ones
||||
|
@ -37,7 +44,7 @@ In case of a new version (beta, RC, or official release):
|
|||
`BUILD.md` files where necessary.
|
||||
4. Send a PR with the above changes.
|
||||
|
||||
In case of an EOL version:
|
||||
In case of the removal of a version:
|
||||
|
||||
1. Remove any mention to this version from our repo.
|
||||
* Consult the previous paragraph, but also `grep` your way around.
|
||||
|
@ -51,192 +58,13 @@ Follow the instructions in `docs/developer/TESTING.md` to run the tests.
|
|||
|
||||
These tests will identify any regressions or progression in terms of document coverage.
|
||||
|
||||
## QA
|
||||
|
||||
To ensure that new releases do not introduce regressions, and support existing
|
||||
and newer platforms, we have to do the following:
|
||||
|
||||
- [ ] Make sure that the tip of the `main` branch passes the CI tests.
|
||||
- [ ] Make sure that the Apple account has a valid application password and has
|
||||
agreed to the latest Apple terms (see [macOS release](#macos-release)
|
||||
section).
|
||||
- [ ] Create a test build in Windows and make sure it works:
|
||||
- [ ] Check if the suggested Python version is still supported.
|
||||
- [ ] Create a new development environment with Poetry.
|
||||
- [ ] Build the container image and ensure the development environment uses
|
||||
the new image.
|
||||
- [ ] Run the Dangerzone tests.
|
||||
- [ ] Build and run the Dangerzone .exe
|
||||
- [ ] Test some QA scenarios (see [Scenarios](#Scenarios) below).
|
||||
- [ ] Create a test build in macOS (Intel CPU) and make sure it works:
|
||||
- [ ] Check if the suggested Python version is still supported.
|
||||
- [ ] Create a new development environment with Poetry.
|
||||
- [ ] Build the container image and ensure the development environment uses
|
||||
the new image.
|
||||
- [ ] Run the Dangerzone tests.
|
||||
- [ ] Create and run an app bundle.
|
||||
- [ ] Test some QA scenarios (see [Scenarios](#Scenarios) below).
|
||||
- [ ] Create a test build in macOS (M1/2 CPU) and make sure it works:
|
||||
- [ ] Check if the suggested Python version is still supported.
|
||||
- [ ] Create a new development environment with Poetry.
|
||||
- [ ] Build the container image and ensure the development environment uses
|
||||
the new image.
|
||||
- [ ] Run the Dangerzone tests.
|
||||
- [ ] Create and run an app bundle.
|
||||
- [ ] Test some QA scenarios (see [Scenarios](#Scenarios) below).
|
||||
- [ ] Create a test build in the most recent Ubuntu LTS platform (Ubuntu 24.04
|
||||
as of writing this) and make sure it works:
|
||||
- [ ] Create a new development environment with Poetry.
|
||||
- [ ] Build the container image and ensure the development environment uses
|
||||
the new image.
|
||||
- [ ] Run the Dangerzone tests.
|
||||
- [ ] Create a .deb package and install it system-wide.
|
||||
- [ ] Test some QA scenarios (see [Scenarios](#Scenarios) below).
|
||||
- [ ] Create a test build in the most recent Fedora platform (Fedora 41 as of
|
||||
writing this) and make sure it works:
|
||||
- [ ] Create a new development environment with Poetry.
|
||||
- [ ] Build the container image and ensure the development environment uses
|
||||
the new image.
|
||||
- [ ] Run the Dangerzone tests.
|
||||
- [ ] Create an .rpm package and install it system-wide.
|
||||
- [ ] Test some QA scenarios (see [Scenarios](#Scenarios) below).
|
||||
- [ ] Create a test build in the most recent Qubes Fedora template (Fedora 40 as
|
||||
of writing this) and make sure it works:
|
||||
- [ ] Create a new development environment with Poetry.
|
||||
- [ ] Run the Dangerzone tests.
|
||||
- [ ] Create a Qubes .rpm package and install it system-wide.
|
||||
- [ ] Ensure that the Dangerzone application appears in the "Applications"
|
||||
tab.
|
||||
- [ ] Test some QA scenarios (see [Scenarios](#Scenarios) below) and make sure
|
||||
they spawn disposable qubes.
|
||||
|
||||
### Scenarios
|
||||
|
||||
#### 1. Dangerzone correctly identifies that Docker/Podman is not installed
|
||||
|
||||
_(Only for MacOS / Windows)_
|
||||
|
||||
Temporarily hide the Docker/Podman binaries, e.g., rename the `docker` /
|
||||
`podman` binaries to something else. Then run Dangerzone. Dangerzone should
|
||||
prompt the user to install Docker/Podman.
|
||||
|
||||
#### 2. Dangerzone correctly identifies that Docker is not running
|
||||
|
||||
_(Only for MacOS / Windows)_
|
||||
|
||||
Stop the Docker Desktop application. Then run Dangerzone. Dangerzone should
|
||||
prompt the user to start Docker Desktop.
|
||||
|
||||
|
||||
#### 3. Updating Dangerzone handles external state correctly.
|
||||
|
||||
_(Applies to Windows/MacOS)_
|
||||
|
||||
Install the previous version of Dangerzone, downloaded from the website.
|
||||
|
||||
Open the Dangerzone application and enable some non-default settings.
|
||||
**If there are new settings, make sure to change those as well**.
|
||||
|
||||
Close the Dangerzone application and get the container image for that
|
||||
version. For example:
|
||||
|
||||
```
|
||||
$ docker images dangerzone.rocks/dangerzone:latest
|
||||
REPOSITORY TAG IMAGE ID CREATED SIZE
|
||||
dangerzone.rocks/dangerzone latest <image ID> <date> <size>
|
||||
```
|
||||
|
||||
Then run the version under QA and ensure that the settings remain changed.
|
||||
|
||||
Afterwards check that new docker image was installed by running the same command
|
||||
and seeing the following differences:
|
||||
|
||||
```
|
||||
$ docker images dangerzone.rocks/dangerzone:latest
|
||||
REPOSITORY TAG IMAGE ID CREATED SIZE
|
||||
dangerzone.rocks/dangerzone latest <different ID> <newer date> <different size>
|
||||
```
|
||||
|
||||
#### 4. Dangerzone successfully installs the container image
|
||||
|
||||
_(Only for Linux)_
|
||||
|
||||
Remove the Dangerzone container image from Docker/Podman. Then run Dangerzone.
|
||||
Dangerzone should install the container image successfully.
|
||||
|
||||
#### 5. Dangerzone retains the settings of previous runs
|
||||
|
||||
Run Dangerzone and make some changes in the settings (e.g., change the OCR
|
||||
language, toggle whether to open the document after conversion, etc.). Restart
|
||||
Dangerzone. Dangerzone should show the settings that the user chose.
|
||||
|
||||
#### 6. Dangerzone reports failed conversions
|
||||
|
||||
Run Dangerzone and convert the `tests/test_docs/sample_bad_pdf.pdf` document.
|
||||
Dangerzone should fail gracefully, by reporting that the operation failed, and
|
||||
showing the following error message:
|
||||
|
||||
> The document format is not supported
|
||||
|
||||
#### 7. Dangerzone succeeds in converting multiple documents
|
||||
|
||||
Run Dangerzone against a list of documents, and tick all options. Ensure that:
|
||||
* Conversions take place sequentially.
|
||||
* Attempting to close the window while converting asks the user if they want to
|
||||
abort the conversions.
|
||||
* Conversions are completed successfully.
|
||||
* Conversions show individual progress in real-time (double-check for Qubes).
|
||||
* _(Only for Linux)_ The resulting files open with the PDF viewer of our choice.
|
||||
* OCR seems to have detected characters in the PDF files.
|
||||
* The resulting files have been saved with the proper suffix, in the proper
|
||||
location.
|
||||
* The original files have been saved in the `unsafe/` directory.
|
||||
|
||||
#### 8. Dangerzone is able to handle drag-n-drop
|
||||
|
||||
Run Dangerzone against a set of documents that you drag-n-drop. Files should be
|
||||
added and conversion should run without issue.
|
||||
|
||||
> [!TIP]
|
||||
> On our end-user container environments for Linux, we can start a file manager
|
||||
> with `thunar &`.
|
||||
|
||||
#### 9. Dangerzone CLI succeeds in converting multiple documents
|
||||
|
||||
_(Only for Windows and Linux)_
|
||||
|
||||
Run Dangerzone CLI against a list of documents. Ensure that conversions happen
|
||||
sequentially, are completed successfully, and we see their progress.
|
||||
|
||||
#### 10. Dangerzone can open a document for conversion via right-click -> "Open With"
|
||||
|
||||
_(Only for Windows, MacOS and Qubes)_
|
||||
|
||||
Go to a directory with office documents, right-click on one, and click on "Open
|
||||
With". We should be able to open the file with Dangerzone, and then convert it.
|
||||
|
||||
#### 11. Dangerzone shows helpful errors for setup issues on Qubes
|
||||
|
||||
_(Only for Qubes)_
|
||||
|
||||
Check what errors does Dangerzone throw in the following scenarios. The errors
|
||||
should point the user to the Qubes notifications in the top-right corner:
|
||||
|
||||
1. The `dz-dvm` template does not exist. We can trigger this scenario by
|
||||
temporarily renaming this template.
|
||||
2. The Dangerzone RPC policy does not exist. We can trigger this scenario by
|
||||
temporarily renaming the `dz.Convert` policy.
|
||||
3. The `dz-dvm` disposable Qube cannot start due to insufficient resources. We
|
||||
can trigger this scenario by temporarily increasing the minimum required RAM
|
||||
of the `dz-dvm` template to more than the available amount.
|
||||
|
||||
## Release

Once we are confident that the release will be out shortly, and doesn't need any more changes:

- [ ] Create a PGP-signed git tag for the version, e.g., for dangerzone `v0.1.0`:

  ```bash
  git tag -s v0.1.0
  git push origin v0.1.0
  ```
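If you want to double-check the tag afterwards, `git tag -v` verifies its signature (a sketch):

```bash
git tag -v v0.1.0
```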
|
||||
|
@@ -252,6 +80,17 @@ Once we are confident that the release will be out shortly, and doesn't need any

### macOS Release

> [!TIP]
> You can automate these steps from your macOS terminal app with:
>
> ```
> doit clean
> doit -n 8 apple_id=<email> # for Intel macOS
> doit -n 8 apple_id=<email> macos_build_dmg # for Apple Silicon macOS
> ```

The following needs to happen for both Silicon and Intel chipsets.
|
||||
|
||||
#### Initial Setup
|
||||
|
||||
- Build machine must have:
|
||||
|
@ -266,48 +105,83 @@ Once we are confident that the release will be out shortly, and doesn't need any
|
|||
|
||||
#### Releasing and Signing
|
||||
|
||||
Here is what you need to do:
|
||||
|
||||
- [ ] Verify and install the latest supported Python version from
|
||||
[python.org](https://www.python.org/downloads/macos/) (do not use the one from
|
||||
brew as it is known to [cause issues](https://github.com/freedomofpress/dangerzone/issues/471))
|
||||
* In case of a new Python installation or minor version upgrade, e.g., from
|
||||
3.11 to 3.12 , reinstall Poetry with `python3 -m pip install poetry`
|
||||
* You can verify the correct Python version is used with `poetry debug info`
|
||||
- [ ] Verify and checkout the git tag for this release
|
||||
- [ ] Run `poetry install --sync`
|
||||
- [ ] On the silicon mac, build the container image:
|
||||
|
||||
- [ ] Checkout the dependencies, and clean your local copy:
|
||||
|
||||
```bash
|
||||
|
||||
# In case of a new Python installation or minor version upgrade, e.g., from
|
||||
# 3.11 to 3.12, reinstall Poetry
|
||||
python3 -m pip install poetry
|
||||
|
||||
# You can verify the correct Python version is used
|
||||
poetry debug info
|
||||
|
||||
# Replace with the actual version
|
||||
export VERSION=$(cat share/version.txt)
|
||||
|
||||
# Verify and checkout the git tag for this release:
|
||||
git checkout -f v$VERSION
|
||||
|
||||
# Clean the git repository
|
||||
git clean -df
|
||||
|
||||
# Clean up the environment
|
||||
poetry env remove --all
|
||||
|
||||
# Install the dependencies
|
||||
poetry install --sync
|
||||
```
|
||||
python3 ./install/common/build-image.py
|
||||
|
||||
- [ ] Build the container image and the OCR language data
|
||||
|
||||
```bash
|
||||
poetry run ./install/common/build-image.py
|
||||
poetry run ./install/common/download-tessdata.py
|
||||
|
||||
# Copy the container image to the assets folder
|
||||
cp share/container.tar.gz ~dz/release-assets/$VERSION/dangerzone-$VERSION-arm64.tar.gz
|
||||
cp share/image-id.txt ~dz/release-assets/$VERSION/.
|
||||
```
|
||||
Then copy the `share/container.tar.gz` to the assets folder on `dangerzone-$VERSION-arm64.tar.gz`, along with the `share/image-id.txt` file.
|
||||
- [ ] Run `poetry run ./install/macos/build-app.py`; this will make `dist/Dangerzone.app`
|
||||
- [ ] Make sure that the built application works with the containerd graph
|
||||
driver (see [#933](https://github.com/freedomofpress/dangerzone/issues/933))
|
||||
- [ ] Run `poetry run ./install/macos/build-app.py --only-codesign`; this will make `dist/Dangerzone.dmg`
|
||||
* You need to run this command as the account that has access to the code signing certificate
|
||||
* You must run this command from the MacOS UI, from a terminal application.
|
||||
- [ ] Notarize it: `xcrun notarytool submit --wait --apple-id "<email>" --keychain-profile "dz-notarytool-release-key" dist/Dangerzone.dmg`
|
||||
* You need to change the `<email>` in the above command with the email
|
||||
associated with the Apple Developer ID.
|
||||
* This command assumes that you have created, and stored in the Keychain, an
|
||||
|
||||
- [ ] Build the app bundle
|
||||
|
||||
```bash
|
||||
poetry run ./install/macos/build-app.py
|
||||
```
|
||||
|
||||
- [ ] Sign the application bundle, and notarize it
|
||||
|
||||
You need to run this command as the account that has access to the code signing certificate
|
||||
|
||||
This command assumes that you have created, and stored in the Keychain, an
|
||||
application password associated with your Apple Developer ID, which will be
|
||||
used specifically for `notarytool`.
|
||||
- [ ] Wait for it to get approved:
|
||||
* If it gets rejected, you should be able to see why with the same command
|
||||
(or use the `log` option for a more verbose JSON output)
|
||||
* You will also receive an update in your email.
|
||||
- [ ] After it's approved, staple the ticket: `xcrun stapler staple dist/Dangerzone.dmg`
|
||||
|
||||
This process ends up with the final file:
|
||||
```bash
|
||||
# Sign the .App and make it a .dmg
|
||||
poetry run ./install/macos/build-app.py --only-codesign
|
||||
|
||||
```
|
||||
dist/Dangerzone.dmg
|
||||
```
|
||||
# Notarize it. You must run this command from the MacOS UI
|
||||
# from a terminal application.
|
||||
xcrun notarytool submit ./dist/Dangerzone.dmg --apple-id $APPLE_ID --keychain-profile "dz-notarytool-release-key" --wait && xcrun stapler staple dist/Dangerzone.dmg
|
||||
|
||||
Rename `Dangerzone.dmg` to `Dangerzone-$VERSION.dmg`.
|
||||
# Copy the .dmg to the assets folder
|
||||
ARCH=$(uname -m)
|
||||
if [ "$ARCH" = "x86_64" ]; then
|
||||
ARCH="i686"
|
||||
fi
|
||||
cp dist/Dangerzone.dmg ~dz/release-assets/$VERSION/Dangerzone-$VERSION-$ARCH.dmg
|
||||
```
|
||||
|
||||
### Windows Release
|
||||
|
||||
The Windows release is performed in a Windows 11 virtual machine as opposed to a physical one.
|
||||
The Windows release is performed in a Windows 11 virtual machine (as opposed to a physical one).
|
||||
|
||||
#### Initial Setup
|
||||
|
||||
|
@ -321,14 +195,34 @@ The Windows release is performed in a Windows 11 virtual machine as opposed to a
|
|||
|
||||
#### Releasing and Signing
|
||||
|
||||
- [ ] Verify and checkout the git tag for this release
|
||||
- [ ] Run `poetry install --sync`
|
||||
- [ ] Checkout the dependencies, and clean your local copy:
|
||||
```bash
|
||||
# In case of a new Python installation or minor version upgrade, e.g., from
|
||||
# 3.11 to 3.12, reinstall Poetry
|
||||
python3 -m pip install poetry
|
||||
|
||||
# You can verify the correct Python version is used
|
||||
poetry debug info
|
||||
|
||||
# Replace with the actual version
|
||||
export VERSION=$(cat share/version.txt)
|
||||
|
||||
# Verify and checkout the git tag for this release:
|
||||
git checkout -f v$VERSION
|
||||
|
||||
# Clean the git repository
|
||||
git clean -df
|
||||
|
||||
# Clean up the environment
|
||||
poetry env remove --all
|
||||
|
||||
# Install the dependencies
|
||||
poetry install --sync
|
||||
```
|
||||
|
||||
- [ ] Copy the container image into the VM
|
||||
> [!IMPORTANT]
|
||||
> Instead of running `python .\install\windows\build-image.py` in the VM, run the build image script on the host (making sure to build for `linux/amd64`). Copy `share/container.tar.gz` and `share/image-id.txt` from the host into the `share` folder in the VM.
|
||||
> Also, don't forget to add the supplementary image ID (see
|
||||
> [#933](https://github.com/freedomofpress/dangerzone/issues/933)) in
|
||||
> `share/image-id.txt`)
|
||||
- [ ] Run `poetry run .\install\windows\build-app.bat`
|
||||
- [ ] When you're done you will have `dist\Dangerzone.msi`
|
||||
|
||||
|
@ -336,12 +230,17 @@ Rename `Dangerzone.msi` to `Dangerzone-$VERSION.msi`.
|
|||
|
||||
### Linux release
|
||||
|
||||
> [!INFO]
|
||||
> Below we explain how we build packages for each Linux distribution we support.
|
||||
> [!TIP]
|
||||
> You can automate these steps from any Linux distribution with:
|
||||
>
|
||||
> There is also a `release.sh` script available which creates all
|
||||
> the `.rpm` and `.deb` files with a single command.
|
||||
> ```
|
||||
> doit clean
|
||||
> doit -n 8 fedora_rpm debian_deb
|
||||
> ```
|
||||
>
|
||||
> You can then add the created artifacts to the appropriate APT/YUM repo.
|
||||
|
||||
Below we explain how we build packages for each Linux distribution we support.
|
||||
|
||||
#### Debian/Ubuntu
|
||||
|
||||
|
@ -354,21 +253,15 @@ instructions in our build section](https://github.com/freedomofpress/dangerzone/
|
|||
or create your own locally with:
|
||||
|
||||
```sh
|
||||
# Create and run debian bookworm development environment
|
||||
./dev_scripts/env.py --distro debian --version bookworm build-dev
|
||||
./dev_scripts/env.py --distro debian --version bookworm run --dev bash
|
||||
cd dangerzone
|
||||
```
|
||||
|
||||
Build the latest container:
|
||||
# Build the latest container
|
||||
./dev_scripts/env.py --distro debian --version bookworm run --dev bash -c "cd dangerzone && poetry run ./install/common/build-image.py"
|
||||
|
||||
```sh
|
||||
python3 ./install/common/build-image.py
|
||||
```
|
||||
|
||||
Create a .deb:
|
||||
|
||||
```sh
|
||||
./install/linux/build-deb.py
|
||||
# Create a .deb
|
||||
./dev_scripts/env.py --distro debian --version bookworm run --dev bash -c "cd dangerzone && ./install/linux/build-deb.py"
|
||||
```
|
||||
|
||||
Publish the .deb under `./deb_dist` to the
|
||||
|
@ -387,22 +280,12 @@ or create your own locally with:
|
|||
|
||||
```sh
|
||||
./dev_scripts/env.py --distro fedora --version 41 build-dev
|
||||
./dev_scripts/env.py --distro fedora --version 41 run --dev bash
|
||||
cd dangerzone
|
||||
```
|
||||
|
||||
Build the latest container:
|
||||
# Build the latest container (skip if already built):
|
||||
./dev_scripts/env.py --distro fedora --version 41 run --dev bash -c "cd dangerzone && poetry run ./install/common/build-image.py"
|
||||
|
||||
```sh
|
||||
python3 ./install/common/build-image.py
|
||||
```
|
||||
|
||||
Copy the container image to the assets folder on `dangerzone-$VERSION-i686.tar.gz`.
|
||||
|
||||
Create a .rpm:
|
||||
|
||||
```sh
|
||||
./install/linux/build-rpm.py
|
||||
# Create a .rpm:
|
||||
./dev_scripts/env.py --distro fedora --version 41 run --dev bash -c "cd dangerzone && ./install/linux/build-rpm.py"
|
||||
```
|
||||
|
||||
Publish the .rpm under `./dist` to the
|
||||
|
@ -413,7 +296,7 @@ Publish the .rpm under `./dist` to the
|
|||
Create a .rpm for Qubes:
|
||||
|
||||
```sh
|
||||
./install/linux/build-rpm.py --qubes
|
||||
./dev_scripts/env.py --distro fedora --version 41 run --dev bash -c "cd dangerzone && ./install/linux/build-rpm.py --qubes"
|
||||
```
|
||||
|
||||
and similarly publish it to the [`freedomofpress/yum-tools-prod`](https://github.com/freedomofpress/yum-tools-prod)
|
||||
|
@ -421,36 +304,37 @@ repo.
|
|||
|
||||
## Publishing the Release
|
||||
|
||||
To publish the release:
|
||||
To publish the release, you can follow these steps:
|
||||
|
||||
- [ ] Create an archive of the Dangerzone source in `tar.gz` format:
|
||||
* You can use the following command:
|
||||
|
||||
```
|
||||
export DZ_VERSION=$(cat share/version.txt)
|
||||
git archive --format=tar.gz -o dangerzone-${DZ_VERSION:?}.tar.gz --prefix=dangerzone/ v${DZ_VERSION:?}
|
||||
```bash
|
||||
export VERSION=$(cat share/version.txt)
|
||||
git archive --format=tar.gz -o dangerzone-${VERSION:?}.tar.gz --prefix=dangerzone/ v${VERSION:?}
|
||||
```
|
||||
|
||||
- [ ] Run container scan on the produced container images (some time may have passed since the artifacts were built)
|
||||
```
|
||||
```bash
|
||||
gunzip --keep -c ./share/container.tar.gz > /tmp/container.tar
|
||||
docker pull anchore/grype:latest
|
||||
docker run --rm -v /tmp/container.tar:/container.tar anchore/grype:latest /container.tar
|
||||
```
|
||||
|
||||
- [ ] Collect the assets in a single directory, calculate their SHA-256 hashes, and sign them.
|
||||
* You can use `./dev_scripts/sign-assets.py`, if you want to automate this
|
||||
task.
|
||||
- [ ] Create a new **draft** release on GitHub and upload the macOS and Windows installers.
|
||||
* Copy the release notes text from the template at [`docs/templates/release-notes`](https://github.com/freedomofpress/dangerzone/tree/main/docs/templates/)
|
||||
* You can use `./dev_scripts/upload-asset.py`, if you want to upload an asset
|
||||
using an access token.
|
||||
- [ ] Upload the `container-$VERSION-i686.tar.gz` and `container-$VERSION-arm64.tar.gz` images that were created in the previous step
|
||||
There is an `./dev_scripts/sign-assets.py` script to automate this task.
|
||||
|
||||
**Important:** Make sure that it's the same container images as the ones that
|
||||
are shipped in other platforms (see our [Pre-release](#Pre-release) section)
|
||||
**Important:** Before running the script, make sure that it's the same container images as
|
||||
the ones that are shipped in other platforms (see our [Pre-release](#Pre-release) section)
|
||||
|
||||
```bash
|
||||
# Sign all the assets
|
||||
./dev_scripts/sign-assets.py ~/release-assets/$VERSION/github --version $VERSION
|
||||
```
|
||||
|
||||
- [ ] Upload all the assets to the draft release on GitHub.
|
||||
```bash
|
||||
find ~/release-assets/$VERSION/github | xargs -n1 ./dev_scripts/upload-asset.py --token ~/token --draft
|
||||
```
|
||||
|
||||
- [ ] Upload the detached signatures (.asc) and checksum file.
|
||||
- [ ] Update the [Dangerzone website](https://github.com/freedomofpress/dangerzone.rocks) to link to the new installers.
|
||||
- [ ] Update the brew cask release of Dangerzone with a [PR like this one](https://github.com/Homebrew/homebrew-cask/pull/116319)
|
||||
- [ ] Update version and download links in `README.md`
|
||||
|
|
|
@ -500,6 +500,7 @@ class WaitingWidgetContainer(WaitingWidget):
|
|||
error: Optional[str] = None
|
||||
|
||||
try:
|
||||
assert isinstance(self.dangerzone.isolation_provider, (Dummy, Container))
|
||||
self.dangerzone.isolation_provider.is_runtime_available()
|
||||
except NoContainerTechException as e:
|
||||
log.error(str(e))
|
||||
|
|
|
@ -93,10 +93,6 @@ class IsolationProvider(ABC):
|
|||
else:
|
||||
self.proc_stderr = subprocess.DEVNULL
|
||||
|
||||
@staticmethod
|
||||
def is_runtime_available() -> bool:
|
||||
return True
|
||||
|
||||
@abstractmethod
|
||||
def install(self) -> bool:
|
||||
pass
|
||||
|
|
|
@ -1,11 +1,12 @@
|
|||
import gzip
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import platform
|
||||
import shlex
|
||||
import shutil
|
||||
import subprocess
|
||||
from typing import List, Tuple
|
||||
from typing import Dict, List, Tuple
|
||||
|
||||
from ..document import Document
|
||||
from ..util import get_resource_path, get_subprocess_startupinfo
|
||||
|
@ -155,16 +156,81 @@ class Container(IsolationProvider):
|
|||
return security_args
|
||||
|
||||
@staticmethod
|
||||
def install() -> bool:
|
||||
"""
|
||||
Make sure the podman container is installed. Linux only.
|
||||
"""
|
||||
if Container.is_container_installed():
|
||||
return True
|
||||
def list_image_tags() -> Dict[str, str]:
|
||||
"""Get the tags of all loaded Dangerzone images.
|
||||
|
||||
# Load the container into podman
|
||||
This method returns a mapping of image tags to image IDs, for all Dangerzone
|
||||
images. This can be useful when we want to find which are the local image tags,
|
||||
and which image ID does the "latest" tag point to.
|
||||
"""
|
||||
images = json.loads(
|
||||
subprocess.check_output(
|
||||
[
|
||||
Container.get_runtime(),
|
||||
"image",
|
||||
"list",
|
||||
"--format",
|
||||
"json",
|
||||
Container.CONTAINER_NAME,
|
||||
],
|
||||
text=True,
|
||||
startupinfo=get_subprocess_startupinfo(),
|
||||
)
|
||||
)
|
||||
|
||||
# Grab every image name and associate it with an image ID.
|
||||
tags = {}
|
||||
for image in images:
|
||||
for name in image["Names"]:
|
||||
tag = name.split(":")[1]
|
||||
tags[tag] = image["Id"]
|
||||
|
||||
return tags
|
||||
|
||||
@staticmethod
|
||||
def delete_image_tag(tag: str) -> None:
|
||||
"""Delete a Dangerzone image tag."""
|
||||
name = Container.CONTAINER_NAME + ":" + tag
|
||||
log.warning(f"Deleting old container image: {name}")
|
||||
try:
|
||||
subprocess.check_output(
|
||||
[Container.get_runtime(), "rmi", "--force", name],
|
||||
startupinfo=get_subprocess_startupinfo(),
|
||||
)
|
||||
except Exception as e:
|
||||
log.warning(
|
||||
f"Couldn't delete old container image '{name}', so leaving it there."
|
||||
f" Original error: {e}"
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def add_image_tag(cur_tag: str, new_tag: str) -> None:
|
||||
"""Add a tag to an existing Dangerzone image."""
|
||||
cur_image_name = Container.CONTAINER_NAME + ":" + cur_tag
|
||||
new_image_name = Container.CONTAINER_NAME + ":" + new_tag
|
||||
subprocess.check_output(
|
||||
[
|
||||
Container.get_runtime(),
|
||||
"tag",
|
||||
cur_image_name,
|
||||
new_image_name,
|
||||
],
|
||||
startupinfo=get_subprocess_startupinfo(),
|
||||
)
|
||||
|
||||
log.info(
|
||||
f"Successfully tagged container image '{cur_image_name}' as '{new_image_name}'"
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def get_expected_tag() -> str:
|
||||
"""Get the tag of the Dangerzone image tarball from the image-id.txt file."""
|
||||
with open(get_resource_path("image-id.txt")) as f:
|
||||
return f.read().strip()
|
||||
|
||||
@staticmethod
|
||||
def load_image_tarball() -> None:
|
||||
log.info("Installing Dangerzone container image...")
|
||||
|
||||
p = subprocess.Popen(
|
||||
[Container.get_runtime(), "load"],
|
||||
stdin=subprocess.PIPE,
|
||||
|
@ -191,10 +257,54 @@ class Container(IsolationProvider):
|
|||
f"Could not install container image: {error}"
|
||||
)
|
||||
|
||||
if not Container.is_container_installed(raise_on_error=True):
|
||||
return False
|
||||
log.info("Successfully installed container image from")
|
||||
|
||||
log.info("Container image installed")
|
||||
@staticmethod
|
||||
def install() -> bool:
|
||||
"""Install the container image tarball, or verify that it's already installed.
|
||||
|
||||
Perform the following actions:
|
||||
1. Get the tags of any locally available images that match Dangerzone's image
|
||||
name.
|
||||
2. Get the expected image tag from the image-id.txt file.
|
||||
- If this tag is present in the local images, and that image is also tagged
|
||||
as "latest", then we can return.
|
||||
- Else, prune the older container images and continue.
|
||||
3. Load the image tarball and make sure it matches the expected tag.
|
||||
4. Tag that image as "latest", and mark the installation as finished.
|
||||
"""
|
||||
old_tags = Container.list_image_tags()
|
||||
expected_tag = Container.get_expected_tag()
|
||||
|
||||
if expected_tag not in old_tags:
|
||||
# Prune older container images.
|
||||
log.info(
|
||||
f"Could not find a Dangerzone container image with tag '{expected_tag}'"
|
||||
)
|
||||
for tag in old_tags.keys():
|
||||
Container.delete_image_tag(tag)
|
||||
elif old_tags[expected_tag] != old_tags.get("latest"):
|
||||
log.info(f"The expected tag '{expected_tag}' is not the latest one")
|
||||
Container.add_image_tag(expected_tag, "latest")
|
||||
return True
|
||||
else:
|
||||
return True
|
||||
|
||||
# Load the image tarball into the container runtime.
|
||||
Container.load_image_tarball()
|
||||
|
||||
# Check that the container image has the expected image tag.
|
||||
# See https://github.com/freedomofpress/dangerzone/issues/988 for an example
|
||||
# where this was not the case.
|
||||
new_tags = Container.list_image_tags()
|
||||
if expected_tag not in new_tags:
|
||||
raise ImageNotPresentException(
|
||||
f"Could not find expected tag '{expected_tag}' after loading the"
|
||||
" container image tarball"
|
||||
)
|
||||
|
||||
# Mark the expected tag as "latest".
|
||||
Container.add_image_tag(expected_tag, "latest")
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
|
@ -213,58 +323,6 @@ class Container(IsolationProvider):
|
|||
raise NotAvailableContainerTechException(runtime_name, stderr.decode())
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def is_container_installed(raise_on_error: bool = False) -> bool:
|
||||
"""
|
||||
See if the container is installed.
|
||||
"""
|
||||
# Get the image id
|
||||
with open(get_resource_path("image-id.txt")) as f:
|
||||
expected_image_ids = f.read().strip().split()
|
||||
|
||||
# See if this image is already installed
|
||||
installed = False
|
||||
found_image_id = subprocess.check_output(
|
||||
[
|
||||
Container.get_runtime(),
|
||||
"image",
|
||||
"list",
|
||||
"--format",
|
||||
"{{.ID}}",
|
||||
Container.CONTAINER_NAME,
|
||||
],
|
||||
text=True,
|
||||
startupinfo=get_subprocess_startupinfo(),
|
||||
)
|
||||
found_image_id = found_image_id.strip()
|
||||
|
||||
if found_image_id in expected_image_ids:
|
||||
installed = True
|
||||
elif found_image_id == "":
|
||||
if raise_on_error:
|
||||
raise ImageNotPresentException(
|
||||
"Image is not listed after installation. Bailing out."
|
||||
)
|
||||
else:
|
||||
msg = (
|
||||
f"{Container.CONTAINER_NAME} images found, but IDs do not match."
|
||||
f" Found: {found_image_id}, Expected: {','.join(expected_image_ids)}"
|
||||
)
|
||||
if raise_on_error:
|
||||
raise ImageNotPresentException(msg)
|
||||
log.info(msg)
|
||||
log.info("Deleting old dangerzone container image")
|
||||
|
||||
try:
|
||||
subprocess.check_output(
|
||||
[Container.get_runtime(), "rmi", "--force", found_image_id],
|
||||
startupinfo=get_subprocess_startupinfo(),
|
||||
)
|
||||
except Exception:
|
||||
log.warning("Couldn't delete old container image, so leaving it there")
|
||||
|
||||
return installed
|
||||
|
||||
def doc_to_pixels_container_name(self, document: Document) -> str:
|
||||
"""Unique container name for the doc-to-pixels phase."""
|
||||
return f"dangerzone-doc-to-pixels-{document.id}"
|
||||
|
|
|
@ -39,6 +39,10 @@ class Dummy(IsolationProvider):
|
|||
def install(self) -> bool:
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def is_runtime_available() -> bool:
|
||||
return True
|
||||
|
||||
def start_doc_to_pixels_proc(self, document: Document) -> subprocess.Popen:
|
||||
cmd = [
|
||||
sys.executable,
|
||||
|
|
dev_scripts/generate-release-tasks.py (new executable file, 67 lines)

@@ -0,0 +1,67 @@
#!/usr/bin/env python3
|
||||
import pathlib
|
||||
import subprocess
|
||||
|
||||
RELEASE_FILE = "RELEASE.md"
|
||||
QA_FILE = "QA.md"
|
||||
|
||||
|
||||
def git_root():
|
||||
"""Get the root directory of the Git repo."""
|
||||
# FIXME: Use a Git Python binding for this.
|
||||
# FIXME: Make this work if called outside the repo.
|
||||
path = (
|
||||
subprocess.run(
|
||||
["git", "rev-parse", "--show-toplevel"],
|
||||
check=True,
|
||||
stdout=subprocess.PIPE,
|
||||
)
|
||||
.stdout.decode()
|
||||
.strip("\n")
|
||||
)
|
||||
return pathlib.Path(path)
|
||||
|
||||
|
||||
def extract_checkboxes(filename):
|
||||
headers = []
|
||||
result = []
|
||||
|
||||
with open(filename, "r") as f:
|
||||
lines = f.readlines()
|
||||
|
||||
current_level = 0
|
||||
for line in lines:
|
||||
line = line.rstrip()
|
||||
|
||||
# If it's a header, store it
|
||||
if line.startswith("#"):
|
||||
# Count number of # to determine header level
|
||||
level = len(line) - len(line.lstrip("#"))
|
||||
if level < current_level or not current_level:
|
||||
headers.extend(["", line, ""])
|
||||
current_level = level
|
||||
elif level > current_level:
|
||||
continue
|
||||
else:
|
||||
headers = ["", line, ""]
|
||||
|
||||
# If it's a checkbox
|
||||
elif "- [ ]" in line or "- [x]" in line or "- [X]" in line:
|
||||
# Print the last header if we haven't already
|
||||
if headers:
|
||||
result.extend(headers)
|
||||
headers = []
|
||||
current_level = 0
|
||||
|
||||
# If this is the "Do the QA tasks" line, recursively get QA tasks
|
||||
if "Do the QA tasks" in line:
|
||||
result.append(line)
|
||||
qa_tasks = extract_checkboxes(git_root() / QA_FILE)
|
||||
result.append(qa_tasks)
|
||||
else:
|
||||
result.append(line)
|
||||
return "\n".join(result)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
print(extract_checkboxes(git_root() / RELEASE_FILE))
|
|
@ -20,17 +20,32 @@ EOL_PYTHON_URL = "https://endoflife.date/api/python.json"
|
|||
CONTENT_QA = r"""## QA
|
||||
|
||||
To ensure that new releases do not introduce regressions, and support existing
|
||||
and newer platforms, we have to do the following:
|
||||
and newer platforms, we have to test that the produced packages work as expected.
|
||||
|
||||
Check the following:
|
||||
|
||||
- [ ] Make sure that the tip of the `main` branch passes the CI tests.
|
||||
- [ ] Make sure that the Apple account has a valid application password and has
|
||||
agreed to the latest Apple terms (see [macOS release](#macos-release)
|
||||
section).
|
||||
|
||||
Because it is repetitive, we wrote a script to help with the QA.
|
||||
It can run the tasks for you, pausing when it needs manual intervention.
|
||||
|
||||
You can run it with a command like:
|
||||
|
||||
```bash
|
||||
poetry run ./dev_scripts/qa.py {distro}-{version}
|
||||
```
|
||||
|
||||
### The checklist
|
||||
|
||||
- [ ] Create a test build in Windows and make sure it works:
|
||||
- [ ] Check if the suggested Python version is still supported.
|
||||
- [ ] Create a new development environment with Poetry.
|
||||
- [ ] Build the container image and ensure the development environment uses
|
||||
the new image.
|
||||
- [ ] Download the OCR language data using `./install/common/download-tessdata.py`
|
||||
- [ ] Run the Dangerzone tests.
|
||||
- [ ] Build and run the Dangerzone .exe
|
||||
- [ ] Test some QA scenarios (see [Scenarios](#Scenarios) below).
|
||||
|
@ -39,6 +54,7 @@ and newer platforms, we have to do the following:
|
|||
- [ ] Create a new development environment with Poetry.
|
||||
- [ ] Build the container image and ensure the development environment uses
|
||||
the new image.
|
||||
- [ ] Download the OCR language data using `./install/common/download-tessdata.py`
|
||||
- [ ] Run the Dangerzone tests.
|
||||
- [ ] Create and run an app bundle.
|
||||
- [ ] Test some QA scenarios (see [Scenarios](#Scenarios) below).
|
||||
|
@ -47,6 +63,7 @@ and newer platforms, we have to do the following:
|
|||
- [ ] Create a new development environment with Poetry.
|
||||
- [ ] Build the container image and ensure the development environment uses
|
||||
the new image.
|
||||
- [ ] Download the OCR language data using `./install/common/download-tessdata.py`
|
||||
- [ ] Run the Dangerzone tests.
|
||||
- [ ] Create and run an app bundle.
|
||||
- [ ] Test some QA scenarios (see [Scenarios](#Scenarios) below).
|
||||
|
@ -55,6 +72,7 @@ and newer platforms, we have to do the following:
|
|||
- [ ] Create a new development environment with Poetry.
|
||||
- [ ] Build the container image and ensure the development environment uses
|
||||
the new image.
|
||||
- [ ] Download the OCR language data using `./install/common/download-tessdata.py`
|
||||
- [ ] Run the Dangerzone tests.
|
||||
- [ ] Create a .deb package and install it system-wide.
|
||||
- [ ] Test some QA scenarios (see [Scenarios](#Scenarios) below).
|
||||
|
@ -63,6 +81,7 @@ and newer platforms, we have to do the following:
|
|||
- [ ] Create a new development environment with Poetry.
|
||||
- [ ] Build the container image and ensure the development environment uses
|
||||
the new image.
|
||||
- [ ] Download the OCR language data using `./install/common/download-tessdata.py`
|
||||
- [ ] Run the Dangerzone tests.
|
||||
- [ ] Create an .rpm package and install it system-wide.
|
||||
- [ ] Test some QA scenarios (see [Scenarios](#Scenarios) below).
|
||||
|
@ -108,9 +127,10 @@ Close the Dangerzone application and get the container image for that
|
|||
version. For example:
|
||||
|
||||
```
|
||||
$ docker images dangerzone.rocks/dangerzone:latest
|
||||
$ docker images dangerzone.rocks/dangerzone
|
||||
REPOSITORY TAG IMAGE ID CREATED SIZE
|
||||
dangerzone.rocks/dangerzone latest <image ID> <date> <size>
|
||||
dangerzone.rocks/dangerzone <tag> <image ID> <date> <size>
|
||||
```
|
||||
|
||||
Then run the version under QA and ensure that the settings remain changed.
|
||||
|
@ -119,9 +139,10 @@ Afterwards check that new docker image was installed by running the same command
|
|||
and seeing the following differences:
|
||||
|
||||
```
|
||||
$ docker images dangerzone.rocks/dangerzone:latest
|
||||
$ docker images dangerzone.rocks/dangerzone
|
||||
REPOSITORY TAG IMAGE ID CREATED SIZE
|
||||
dangerzone.rocks/dangerzone latest <different ID> <newer date> <different size>
|
||||
dangerzone.rocks/dangerzone <other tag> <different ID> <newer date> <different size>
|
||||
```
|
||||
|
||||
#### 4. Dangerzone successfully installs the container image
|
||||
|
@ -553,7 +574,7 @@ class Reference:
|
|||
# Convert spaces to dashes
|
||||
anchor = anchor.replace(" ", "-")
|
||||
# Remove non-alphanumeric (except dash and underscore)
|
||||
anchor = re.sub("[^a-zA-Z\-_]", "", anchor)
|
||||
anchor = re.sub("[^a-zA-Z-_]", "", anchor)
|
||||
|
||||
return anchor
|
||||
|
||||
|
@ -572,8 +593,8 @@ class QABase(abc.ABC):
|
|||
|
||||
platforms = {}
|
||||
|
||||
REF_QA = Reference("RELEASE.md", content=CONTENT_QA)
|
||||
REF_QA_SCENARIOS = Reference("RELEASE.md", content=CONTENT_QA_SCENARIOS)
|
||||
REF_QA = Reference("QA.md", content=CONTENT_QA)
|
||||
REF_QA_SCENARIOS = Reference("QA.md", content=CONTENT_QA_SCENARIOS)
|
||||
|
||||
# The following class method is available since Python 3.6. For more details, see:
|
||||
# https://docs.python.org/3.6/whatsnew/3.6.html#pep-487-simpler-customization-of-class-creation
|
||||
|
@ -1051,6 +1072,10 @@ class QAFedora(QALinux):
|
|||
)
|
||||
|
||||
|
||||
class QAFedora41(QAFedora):
|
||||
VERSION = "41"
|
||||
|
||||
|
||||
class QAFedora40(QAFedora):
|
||||
VERSION = "40"
|
||||
|
||||
|
|
docs/developer/doit.md (new file, 58 lines)

@@ -0,0 +1,58 @@
# Using the Doit Automation Tool

Developers can use the [Doit](https://pydoit.org/) automation tool to create
release artifacts. The purpose of the tool is to automate our manual release
instructions in the `RELEASE.md` file. Not everything is automated yet, since we're
still experimenting with this tool. You can find our task definitions in this
repo's `dodo.py` file.

## Why Doit?

We picked Doit out of the various tools out there for the following reasons:

* **Pythonic:** The configuration file and tasks can be written in Python. Where
  applicable, it's easy to issue shell commands as well.
* **File targets:** Doit borrows the file target concept from Makefiles. Tasks
  can have file dependencies, and targets they build. This makes it easy to
  define a dependency graph (DAG) for tasks (see the sketch below).
* **Hash-based caching:** Unlike Makefiles, doit does not look at the
  modification timestamp of source/target files to figure out if it needs to
  run them. Instead, it hashes those files, and will run a task only if the
  hash of a file dependency has changed.
* **Parallelization:** Tasks can be run in parallel with the `-n` argument,
  which is similar to `make`'s `-j` argument.
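Here is a minimal sketch of what a task with a file dependency and a file target looks like in a `dodo.py` (an illustrative task, not one from the project's actual `dodo.py`; `banner.txt` is a made-up target):

```python
# dodo.py (illustrative): one task with a file dependency and a file target.
# Doit hashes "share/version.txt" and re-runs the task only when it changes.
def task_version_banner():
    """Write the current version into a banner file."""

    def write_banner():
        with open("share/version.txt") as src, open("banner.txt", "w") as dst:
            dst.write(f"Dangerzone {src.read().strip()}\n")

    return {
        "actions": [write_banner],
        "file_dep": ["share/version.txt"],
        "targets": ["banner.txt"],
    }
```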

## How to Doit?

First, enter your Poetry shell. Then, make sure that your environment is clean,
and you have ample disk space. You can run:

```bash
doit clean --dry-run  # if you want to see what would happen
doit clean            # you'll be asked to confirm that you want to clean everything
```

Finally, you can build all the release artifacts with `doit`, or a specific task
with:

```
doit <task>
```
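For example, to build only specific artifacts (task names referenced elsewhere in this changeset, e.g. in the RELEASE.md tips):

```bash
doit macos_build_dmg
doit -n 8 fedora_rpm debian_deb
```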

## Tips and tricks

* You can run `doit list --all -s` to see the full list of tasks, their
  dependencies, and whether they are up to date.
* You can run `doit info <task>` to see which dependencies are missing.
* You can change this line in `pyproject.toml` to `true`, to allow using the
  Docker/Podman build cache:

  ```
  use_cache = true
  ```

* You can pass the following global parameters with `doit <param>=<value>`:
  - `runtime`: The container runtime to use. Either `podman` or `docker`
  - `release_dir`: Where to store the release artifacts. Default path is
    `~/release-assets/<version>`
  - `apple_id`: The Apple ID to use when signing/notarizing the macOS DMG.
dodo.py (new file, 405 lines)

@@ -0,0 +1,405 @@
import json
|
||||
import os
|
||||
import platform
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
|
||||
from doit import get_var
|
||||
from doit.action import CmdAction
|
||||
|
||||
ARCH = "arm64" if platform.machine() == "arm64" else "i686"
|
||||
VERSION = open("share/version.txt").read().strip()
|
||||
FEDORA_VERSIONS = ["40", "41"]
|
||||
DEBIAN_VERSIONS = ["bullseye", "focal", "jammy", "mantic", "noble", "trixie"]
|
||||
|
||||
### Global parameters
|
||||
#
|
||||
# Read more about global parameters in
|
||||
# https://pydoit.org/task-args.html#command-line-variables-doit-get-var
|
||||
|
||||
CONTAINER_RUNTIME = get_var("runtime", "podman")
|
||||
DEFAULT_RELEASE_DIR = Path.home() / "release-assets" / VERSION
|
||||
# XXX: Workaround for https://github.com/pydoit/doit/issues/164
|
||||
RELEASE_DIR = Path(get_var("release_dir", None) or DEFAULT_RELEASE_DIR)
|
||||
APPLE_ID = get_var("apple_id", None)
|
||||
|
||||
### Task Parameters
|
||||
|
||||
PARAM_APPLE_ID = {
|
||||
"name": "apple_id",
|
||||
"long": "apple-id",
|
||||
"default": APPLE_ID,
|
||||
"help": "The Apple developer ID that will be used for signing the .dmg",
|
||||
}
|
||||
|
||||
PARAM_USE_CACHE = {
|
||||
"name": "use_cache",
|
||||
"long": "use-cache",
|
||||
"help": (
|
||||
"Whether to use cached results or not. For reproducibility reasons,"
|
||||
" it's best to leave it to false"
|
||||
),
|
||||
"default": False,
|
||||
}
|
||||
|
||||
### File dependencies
|
||||
#
|
||||
# Define all the file dependencies for our tasks in a single place, since some file
|
||||
# dependencies are shared between tasks.
|
||||
|
||||
|
||||
def list_files(path, recursive=False):
|
||||
"""List files in a directory, and optionally traverse into subdirectories."""
|
||||
filepaths = []
|
||||
for root, _, files in os.walk(path):
|
||||
for f in files:
|
||||
if f.endswith(".pyc"):
|
||||
continue
|
||||
filepaths.append(Path(root) / f)
|
||||
if not recursive:
|
||||
break
|
||||
return filepaths
|
||||
|
||||
|
||||
def list_language_data():
|
||||
"""List the expected language data that Dangerzone downloads and stores locally."""
|
||||
tessdata_dir = Path("share") / "tessdata"
|
||||
langs = json.loads(open(tessdata_dir.parent / "ocr-languages.json").read()).values()
|
||||
targets = [tessdata_dir / f"{lang}.traineddata" for lang in langs]
|
||||
targets.append(tessdata_dir)
|
||||
return targets
|
||||
|
||||
|
||||
TESSDATA_DEPS = ["install/common/download-tessdata.py", "share/ocr-languages.json"]
|
||||
TESSDATA_TARGETS = list_language_data()
|
||||
|
||||
IMAGE_DEPS = [
|
||||
"Dockerfile",
|
||||
"poetry.lock",
|
||||
*list_files("dangerzone/conversion"),
|
||||
"dangerzone/gvisor_wrapper/entrypoint.py",
|
||||
"install/common/build-image.py",
|
||||
]
|
||||
IMAGE_TARGETS = ["share/container.tar.gz", "share/image-id.txt"]
|
||||
|
||||
SOURCE_DEPS = [
|
||||
*list_files("assets"),
|
||||
*list_files("share"),
|
||||
*list_files("dangerzone", recursive=True),
|
||||
]
|
||||
|
||||
PYTHON_DEPS = ["poetry.lock", "pyproject.toml"]
|
||||
|
||||
DMG_DEPS = [
|
||||
*list_files("install/macos"),
|
||||
*TESSDATA_TARGETS,
|
||||
*IMAGE_TARGETS,
|
||||
*PYTHON_DEPS,
|
||||
*SOURCE_DEPS,
|
||||
]
|
||||
|
||||
LINUX_DEPS = [
|
||||
*list_files("install/linux"),
|
||||
*IMAGE_TARGETS,
|
||||
*PYTHON_DEPS,
|
||||
*SOURCE_DEPS,
|
||||
]
|
||||
|
||||
DEB_DEPS = [*LINUX_DEPS, *list_files("debian")]
|
||||
RPM_DEPS = [*LINUX_DEPS, *list_files("qubes")]
|
||||
|
||||
|
def copy_dir(src, dst):
    """Copy a directory to a destination dir, and overwrite it if it exists."""
    shutil.rmtree(dst, ignore_errors=True)
    shutil.copytree(src, dst)


def create_release_dir():
    RELEASE_DIR.mkdir(parents=True, exist_ok=True)
    (RELEASE_DIR / "tmp").mkdir(exist_ok=True)


def build_linux_pkg(distro, version, cwd, qubes=False):
    """Generic command for building a .deb/.rpm in a Dangerzone dev environment."""
    pkg = "rpm" if distro == "fedora" else "deb"
    cmd = [
        "python3",
        "./dev_scripts/env.py",
        "--distro",
        distro,
        "--version",
        version,
        "run",
        "--no-gui",
        "--dev",
        f"./dangerzone/install/linux/build-{pkg}.py",
    ]
    if qubes:
        cmd += ["--qubes"]
    return CmdAction(" ".join(cmd), cwd=cwd)


def build_deb(cwd):
    """Build a .deb package on Debian Bookworm."""
    return build_linux_pkg(distro="debian", version="bookworm", cwd=cwd)


def build_rpm(version, cwd, qubes=False):
    """Build an .rpm package on the requested Fedora distro."""
    return build_linux_pkg(distro="fedora", version=version, cwd=cwd, qubes=qubes)


### Tasks


def task_clean_container_runtime():
    """Clean the storage space of the container runtime."""
    return {
        "actions": None,
        "clean": [
            [CONTAINER_RUNTIME, "system", "prune", "-a", "-f"],
        ],
    }


def task_check_container_runtime():
    """Test that the container runtime is ready."""
    return {
        "actions": [
            ["which", CONTAINER_RUNTIME],
            [CONTAINER_RUNTIME, "ps"],
        ],
    }


def task_macos_check_cert():
    """Test that the Apple developer certificate can be used."""
    return {
        "actions": [
            "xcrun notarytool history --apple-id %(apple_id)s --keychain-profile dz-notarytool-release-key"
        ],
        "params": [PARAM_APPLE_ID],
    }


def task_macos_check_system():
    """Run macOS specific system checks, as well as the generic ones."""
    return {
        "actions": None,
        "task_dep": ["check_container_runtime", "macos_check_cert"],
    }


def task_init_release_dir():
    """Create a directory for release artifacts."""
    return {
        "actions": [create_release_dir],
        "clean": [f"rm -rf {RELEASE_DIR}"],
    }


def task_download_tessdata():
    """Download the Tesseract data using ./install/common/download-tessdata.py"""
    return {
        "actions": ["python install/common/download-tessdata.py"],
        "file_dep": TESSDATA_DEPS,
        "targets": TESSDATA_TARGETS,
        "clean": True,
    }


def task_build_image():
    """Build the container image using ./install/common/build-image.py"""
    img_src = "share/container.tar.gz"
    img_dst = RELEASE_DIR / f"container-{VERSION}-{ARCH}.tar.gz"  # FIXME: Add arch
    img_id_src = "share/image-id.txt"
    img_id_dst = RELEASE_DIR / "image-id.txt"  # FIXME: Add arch

    return {
        "actions": [
            f"python install/common/build-image.py --use-cache=%(use_cache)s --runtime={CONTAINER_RUNTIME}",
            ["cp", img_src, img_dst],
            ["cp", img_id_src, img_id_dst],
        ],
        "params": [PARAM_USE_CACHE],
        "file_dep": IMAGE_DEPS,
        "targets": [img_src, img_dst, img_id_src, img_id_dst],
        "task_dep": ["init_release_dir", "check_container_runtime"],
        "clean": True,
    }


def task_poetry_install():
    """Setup the Poetry environment"""
    return {"actions": ["poetry install --sync"], "clean": ["poetry env remove --all"]}


def task_macos_build_dmg():
    """Build the macOS .dmg file for Dangerzone."""
    dz_dir = RELEASE_DIR / "tmp" / "macos"
    dmg_src = dz_dir / "dist" / "Dangerzone.dmg"
    dmg_dst = RELEASE_DIR / f"Dangerzone-{VERSION}-{ARCH}.dmg"  # FIXME: Add -arch

    return {
        "actions": [
            (copy_dir, [".", dz_dir]),
            f"cd {dz_dir} && poetry run install/macos/build-app.py --with-codesign",
            (
                "xcrun notarytool submit --wait --apple-id %(apple_id)s"
                f" --keychain-profile dz-notarytool-release-key {dmg_src}"
            ),
            f"xcrun stapler staple {dmg_src}",
            ["cp", dmg_src, dmg_dst],
            ["rm", "-rf", dz_dir],
        ],
        "params": [PARAM_APPLE_ID],
        "file_dep": DMG_DEPS,
        "task_dep": [
            "macos_check_system",
            "init_release_dir",
            "poetry_install",
            "download_tessdata",
        ],
        "targets": [dmg_src, dmg_dst],
        "clean": True,
    }


def task_debian_env():
    """Build a Debian Bookworm dev environment."""
    return {
        "actions": [
            [
                "python3",
                "./dev_scripts/env.py",
                "--distro",
                "debian",
                "--version",
                "bookworm",
                "build-dev",
            ]
        ],
        "task_dep": ["check_container_runtime"],
    }


def task_debian_deb():
    """Build a Debian package for Debian Bookworm."""
    dz_dir = RELEASE_DIR / "tmp" / "debian"
    deb_name = f"dangerzone_{VERSION}-1_amd64.deb"
    deb_src = dz_dir / "deb_dist" / deb_name
    deb_dst = RELEASE_DIR / deb_name

    return {
        "actions": [
            (copy_dir, [".", dz_dir]),
            build_deb(cwd=dz_dir),
            ["cp", deb_src, deb_dst],
            ["rm", "-rf", dz_dir],
        ],
        "file_dep": DEB_DEPS,
        "task_dep": ["init_release_dir", "debian_env"],
        "targets": [deb_dst],
        "clean": True,
    }


def task_fedora_env():
    """Build Fedora dev environments."""
    for version in FEDORA_VERSIONS:
        yield {
            "name": version,
            "doc": f"Build Fedora {version} dev environments",
            "actions": [
                [
                    "python3",
                    "./dev_scripts/env.py",
                    "--distro",
                    "fedora",
                    "--version",
                    version,
                    "build-dev",
                ],
            ],
            "task_dep": ["check_container_runtime"],
        }


def task_fedora_rpm():
    """Build Fedora packages for every supported version."""
    for version in FEDORA_VERSIONS:
        for qubes in (True, False):
            qubes_ident = "-qubes" if qubes else ""
            qubes_desc = " for Qubes" if qubes else ""
            dz_dir = RELEASE_DIR / "tmp" / f"f{version}{qubes_ident}"
            rpm_names = [
                f"dangerzone{qubes_ident}-{VERSION}-1.fc{version}.x86_64.rpm",
                f"dangerzone{qubes_ident}-{VERSION}-1.fc{version}.src.rpm",
            ]
            rpm_src = [dz_dir / "dist" / rpm_name for rpm_name in rpm_names]
            rpm_dst = [RELEASE_DIR / rpm_name for rpm_name in rpm_names]

            yield {
                "name": version + qubes_ident,
                "doc": f"Build a Fedora {version} package{qubes_desc}",
                "actions": [
                    (copy_dir, [".", dz_dir]),
                    build_rpm(version, cwd=dz_dir, qubes=qubes),
                    ["cp", *rpm_src, RELEASE_DIR],
                    ["rm", "-rf", dz_dir],
                ],
                "file_dep": RPM_DEPS,
                "task_dep": ["init_release_dir", f"fedora_env:{version}"],
                "targets": rpm_dst,
                "clean": True,
            }


def task_git_archive():
    """Build a Git archive of the repo."""
    target = f"{RELEASE_DIR}/dangerzone-{VERSION}.tar.gz"
    return {
        "actions": [
            f"git archive --format=tar.gz -o {target} --prefix=dangerzone/ v{VERSION}"
        ],
        "targets": [target],
        "task_dep": ["init_release_dir"],
    }


#######################################################################################
#
# END OF TASKS
#
# The following task should be the LAST one in the dodo file, so that it runs first when
# running `doit clean`.


def clean_prompt():
    ans = input(
        f"""
You have not specified a target to clean.
This means that doit will clean the following targets:

* ALL the containers, images, and build cache in {CONTAINER_RUNTIME.capitalize()}
* ALL the built targets and directories

For a full list of the targets that doit will clean, run: doit clean --dry-run

Are you sure you want to clean everything (y/N): \
"""
    )
    if ans.lower() in ["yes", "y"]:
        return
    else:
        print("Exiting...")
        exit(1)


def task_clean_prompt():
    """Make sure that the user really wants to run the clean tasks."""
    return {
        "actions": None,
        "clean": [clean_prompt],
    }

@@ -2,12 +2,13 @@ import argparse
import gzip
import os
import platform
import secrets
import subprocess
import sys
from pathlib import Path

BUILD_CONTEXT = "dangerzone/"
TAG = "dangerzone.rocks/dangerzone:latest"
IMAGE_NAME = "dangerzone.rocks/dangerzone"
REQUIREMENTS_TXT = "container-pip-requirements.txt"
if platform.system() in ["Darwin", "Windows"]:
    CONTAINER_RUNTIME = "docker"

@@ -17,6 +18,17 @@ elif platform.system() == "Linux":
    ARCH = platform.machine()


def str2bool(v):
    if isinstance(v, bool):
        return v
    if v.lower() in ("yes", "true", "t", "y", "1"):
        return True
    elif v.lower() in ("no", "false", "f", "n", "0"):
        return False
    else:
        raise argparse.ArgumentTypeError("Boolean value expected.")


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(

@@ -39,13 +51,39 @@ def main():
    )
    parser.add_argument(
        "--use-cache",
        action="store_true",
        type=str2bool,
        nargs="?",
        default=False,
        const=True,
        help="Use the builder's cache to speed up the builds (not suitable for release builds)",
    )
    args = parser.parse_args()

    tarball_path = Path("share") / "container.tar.gz"
    image_id_path = Path("share") / "image-id.txt"

    print(f"Building for architecture '{ARCH}'")

    # Designate a unique tag for this image, depending on the Git commit it was created
    # from:
    # 1. If created from a Git tag (e.g., 0.8.0), the image tag will be `0.8.0`.
    # 2. If created from a commit, it will be something like `0.8.0-31-g6bdaa7a`.
    # 3. If the contents of the Git repo are dirty, we will append a unique identifier
    #    for this run, something like `0.8.0-31-g6bdaa7a-fdcb` or `0.8.0-fdcb`.
    dirty_ident = secrets.token_hex(2)
    tag = (
        subprocess.check_output(
            ["git", "describe", "--first-parent", f"--dirty=-{dirty_ident}"],
        )
        .decode()
        .strip()[1:]  # remove the "v" prefix of the tag.
    )
    image_name_tagged = IMAGE_NAME + ":" + tag

    print(f"Will tag the container image as '{image_name_tagged}'")
    with open(image_id_path, "w") as f:
        f.write(tag)

    print("Exporting container pip dependencies")
    with ContainerPipDependencies():
        if not args.use_cache:

@@ -59,8 +97,11 @@ def main():
            check=True,
        )

        # Build the container image, and tag it with two tags; the one we calculated
        # above, and the "latest" tag.
        print("Building container image")
        cache_args = [] if args.use_cache else ["--no-cache"]
        image_name_latest = IMAGE_NAME + ":latest"
        subprocess.run(
            [
                args.runtime,

@@ -74,7 +115,9 @@ def main():
                "-f",
                "Dockerfile",
                "--tag",
                TAG,
                image_name_latest,
                "--tag",
                image_name_tagged,
            ],
            check=True,
        )

@@ -85,7 +128,7 @@ def main():
            [
                CONTAINER_RUNTIME,
                "save",
                TAG,
                image_name_tagged,
            ],
            stdout=subprocess.PIPE,
        )

@@ -93,7 +136,7 @@ def main():
        print("Compressing container image")
        chunk_size = 4 << 20
        with gzip.open(
            "share/container.tar.gz",
            tarball_path,
            "wb",
            compresslevel=args.compress_level,
        ) as gzip_f:

@@ -105,21 +148,6 @@ def main():
                break
            cmd.wait(5)

    print("Looking up the image id")
    image_id = subprocess.check_output(
        [
            args.runtime,
            "image",
            "list",
            "--format",
            "{{.ID}}",
            TAG,
        ],
        text=True,
    )
    with open("share/image-id.txt", "w") as f:
        f.write(image_id)


class ContainerPipDependencies:
    """Generates PIP dependencies within container"""
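
As an aside, the tagging scheme introduced in the hunks above can be summarized
with the following sketch; the `git describe` outputs shown in the comments are
illustrative examples only:

```python
import secrets
import subprocess


def compute_image_tag() -> str:
    """Sketch of the tagging logic above; example values are illustrative only."""
    dirty_ident = secrets.token_hex(2)
    described = subprocess.check_output(
        ["git", "describe", "--first-parent", f"--dirty=-{dirty_ident}"],
        text=True,
    ).strip()
    # e.g. "v0.8.0"                  -> tag "0.8.0"             (built from a tag)
    # e.g. "v0.8.0-31-g6bdaa7a"      -> tag "0.8.0-31-g6bdaa7a" (built from a commit)
    # e.g. "v0.8.0-31-g6bdaa7a-fdcb" -> dirty tree, with a random suffix appended
    return described[1:]  # drop the leading "v"
```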

39 poetry.lock generated

@@ -229,6 +229,17 @@ files = [
[package.dependencies]
colorama = {version = "*", markers = "platform_system == \"Windows\""}

[[package]]
name = "cloudpickle"
version = "3.1.0"
description = "Pickler class to extend the standard pickle.Pickler functionality"
optional = false
python-versions = ">=3.8"
files = [
    {file = "cloudpickle-3.1.0-py3-none-any.whl", hash = "sha256:fe11acda67f61aaaec473e3afe030feb131d78a43461b718185363384f1ba12e"},
    {file = "cloudpickle-3.1.0.tar.gz", hash = "sha256:81a929b6e3c7335c863c771d673d105f02efdb89dfaba0c90495d1c64796601b"},
]

[[package]]
name = "colorama"
version = "0.4.6"

|
|||
{file = "cx_logging-3.2.1.tar.gz", hash = "sha256:812665ae5012680a6fe47095c3772bce638e47cf05b2c3483db3bdbe6b06da44"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "doit"
|
||||
version = "0.36.0"
|
||||
description = "doit - Automation Tool"
|
||||
optional = false
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "doit-0.36.0-py3-none-any.whl", hash = "sha256:ebc285f6666871b5300091c26eafdff3de968a6bd60ea35dd1e3fc6f2e32479a"},
|
||||
{file = "doit-0.36.0.tar.gz", hash = "sha256:71d07ccc9514cb22fe59d98999577665eaab57e16f644d04336ae0b4bae234bc"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
cloudpickle = "*"
|
||||
importlib-metadata = ">=4.4"
|
||||
|
||||
[package.extras]
|
||||
toml = ["tomli"]
|
||||
|
||||
[[package]]
|
||||
name = "exceptiongroup"
|
||||
version = "1.2.2"
|
||||
|
@ -554,7 +583,6 @@ python-versions = ">=3.8"
|
|||
files = [
|
||||
{file = "lief-0.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a80246b96501b2b1d4927ceb3cb817eda9333ffa9e07101358929a6cffca5dae"},
|
||||
{file = "lief-0.15.1-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:84bf310710369544e2bb82f83d7fdab5b5ac422651184fde8bf9e35f14439691"},
|
||||
{file = "lief-0.15.1-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:517dc5dad31c754720a80a87ad9e6cb1e48223d4505980c2fd86072bd4f69001"},
|
||||
{file = "lief-0.15.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:8fb58efb77358291109d2675d5459399c0794475b497992d0ecee18a4a46a207"},
|
||||
{file = "lief-0.15.1-cp310-cp310-manylinux_2_33_aarch64.whl", hash = "sha256:d5852a246361bbefa4c1d5930741765a2337638d65cfe30de1b7d61f9a54b865"},
|
||||
{file = "lief-0.15.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:12e53dc0253c303df386ae45487a2f0078026602b36d0e09e838ae1d4dbef958"},
|
||||
|
@ -562,7 +590,6 @@ files = [
|
|||
{file = "lief-0.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ddf2ebd73766169594d631b35f84c49ef42871de552ad49f36002c60164d0aca"},
|
||||
{file = "lief-0.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:20508c52de0dffcee3242253541609590167a3e56150cbacb506fdbb822206ef"},
|
||||
{file = "lief-0.15.1-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:0750c892fd3b7161a3c2279f25fe1844427610c3a5a4ae23f65674ced6f93ea5"},
|
||||
{file = "lief-0.15.1-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:3e49bd595a8548683bead982bc15b064257fea3110fd15e22fb3feb17d97ad1c"},
|
||||
{file = "lief-0.15.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a8634ea79d6d9862297fadce025519ab25ff01fcadb333cf42967c6295f0d057"},
|
||||
{file = "lief-0.15.1-cp311-cp311-manylinux_2_33_aarch64.whl", hash = "sha256:1e11e046ad71fe8c81e1a8d1d207fe2b99c967d33ce79c3d3915cb8f5ecacf52"},
|
||||
{file = "lief-0.15.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:674b620cdf1d686f52450fd97c1056d4c92e55af8217ce85a1b2efaf5b32140b"},
|
||||
|
@ -570,15 +597,11 @@ files = [
|
|||
{file = "lief-0.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:e9b96a37bf11ca777ff305d85d957eabad2a92a6e577b6e2fb3ab79514e5a12e"},
|
||||
{file = "lief-0.15.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1a96f17c2085ef38d12ad81427ae8a5d6ad76f0bc62a1e1f5fe384255cd2cc94"},
|
||||
{file = "lief-0.15.1-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:d780af1762022b8e01b613253af490afea3864fbd6b5a49c6de7cea8fde0443d"},
|
||||
{file = "lief-0.15.1-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:536a4ecd46b295b3acac0d60a68d1646480b7761ade862c6c87ccbb41229fae3"},
|
||||
{file = "lief-0.15.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d0f10d80202de9634a16786b53ba3a8f54ae8b9a9e124a964d83212444486087"},
|
||||
{file = "lief-0.15.1-cp312-cp312-manylinux_2_33_aarch64.whl", hash = "sha256:864f17ecf1736296e6d5fc38b11983f9d19a5e799f094e21e20d58bfb1b95b80"},
|
||||
{file = "lief-0.15.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c2ec738bcafee8a569741f4a749f0596823b12f10713306c7d0cbbf85759f51c"},
|
||||
{file = "lief-0.15.1-cp312-cp312-win32.whl", hash = "sha256:db38619edf70e27fb3686b8c0f0bec63ad494ac88ab51660c5ecd2720b506e41"},
|
||||
{file = "lief-0.15.1-cp312-cp312-win_amd64.whl", hash = "sha256:28bf0922de5fb74502a29cc47930d3a052df58dc23ab6519fa590e564f194a60"},
|
||||
{file = "lief-0.15.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0805301e8fef9b13da00c33c831fb0c05ea892309230f3a35551c2dfaf69b11d"},
|
||||
{file = "lief-0.15.1-cp313-cp313-macosx_11_0_x86_64.whl", hash = "sha256:7580defe140e921bc4f210e8a6cb115fcf2923f00d37800b1626168cbca95108"},
|
||||
{file = "lief-0.15.1-cp313-cp313-manylinux2014_aarch64.whl", hash = "sha256:c0119306b6a38759483136de7242b7c2e0a23f1de1d4ae53f12792c279607410"},
|
||||
{file = "lief-0.15.1-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:0616e6048f269d262ff93d67c497ebff3c1d3965ffb9427b0f2b474764fd2e8c"},
|
||||
{file = "lief-0.15.1-cp313-cp313-manylinux_2_33_aarch64.whl", hash = "sha256:6a08b2e512a80040429febddc777768c949bcd53f6f580e902e41ec0d9d936b8"},
|
||||
{file = "lief-0.15.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fcd489ff80860bcc2b2689faa330a46b6d66f0ee3e0f6ef9e643e2b996128a06"},
|
||||
|
@ -586,7 +609,6 @@ files = [
|
|||
{file = "lief-0.15.1-cp313-cp313-win_amd64.whl", hash = "sha256:5af7dcb9c3f44baaf60875df6ba9af6777db94776cc577ee86143bcce105ba2f"},
|
||||
{file = "lief-0.15.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f9757ff0c7c3d6f66e5fdcc6a9df69680fad0dc2707d64a3428f0825dfce1a85"},
|
||||
{file = "lief-0.15.1-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:8ac3cd099be2580d0e15150b1d2f5095c38f150af89993ddf390d7897ee8135f"},
|
||||
{file = "lief-0.15.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:e732619acc34943b504c867258fc0196f1931f72c2a627219d4f116a7acc726d"},
|
||||
{file = "lief-0.15.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:4dedeab498c312a29b58f16b739895f65fa54b2a21b8d98b111e99ad3f7e30a8"},
|
||||
{file = "lief-0.15.1-cp38-cp38-manylinux_2_33_aarch64.whl", hash = "sha256:b9217578f7a45f667503b271da8481207fb4edda8d4a53e869fb922df6030484"},
|
||||
{file = "lief-0.15.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:82e6308ad8bd4bc7eadee3502ede13a5bb398725f25513a0396c8dba850f58a1"},
|
||||
|
@ -594,7 +616,6 @@ files = [
|
|||
{file = "lief-0.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:a079a76bca23aa73c850ab5beb7598871a1bf44662658b952cead2b5ddd31bee"},
|
||||
{file = "lief-0.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:785a3aa14575f046ed9c8d44ea222ea14c697cd03b5331d1717b5b0cf4f72466"},
|
||||
{file = "lief-0.15.1-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:d7044553cf07c8a2ab6e21874f07585610d996ff911b9af71dc6085a89f59daa"},
|
||||
{file = "lief-0.15.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:fa020f3ed6e95bb110a4316af544021b74027d18bf4671339d4cffec27aa5884"},
|
||||
{file = "lief-0.15.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:13285c3ff5ef6de2421d85684c954905af909db0ad3472e33c475e5f0f657dcf"},
|
||||
{file = "lief-0.15.1-cp39-cp39-manylinux_2_33_aarch64.whl", hash = "sha256:932f880ee8a130d663a97a9099516d8570b1b303af7816e70a02f9931d5ef4c2"},
|
||||
{file = "lief-0.15.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:de9453f94866e0f2c36b6bd878625880080e7e5800788f5cbc06a76debf283b9"},
|
||||
|
@@ -1189,4 +1210,4 @@ type = ["pytest-mypy"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.9,<3.13"
content-hash = "5d1ff28aa04c3a814280e55c0b2a307efe5ca953cd4cb281056c35fd2e53fdf0"
content-hash = "a2937fd8ead7b45da571cb943ab43918a9c6d3dcbc6935dc8d0af3d1d4190371"

@@ -34,6 +34,7 @@ setuptools = "*"
cx_freeze = {version = "^7.2.5", platform = "win32"}
pywin32 = {version = "*", platform = "win32"}
pyinstaller = {version = "*", platform = "darwin"}
doit = "^0.36.0"

# Dependencies required for linting the code.
[tool.poetry.group.lint.dependencies]

@@ -66,6 +67,13 @@ skip_gitignore = true
# This is necessary due to https://github.com/PyCQA/isort/issues/1835
follow_links = false

[tool.doit]
verbosity = 3

[tool.doit.tasks.build_image]
# DO NOT change this to 'true' for release artifacts.
use_cache = false

[build-system]
requires = ["poetry-core>=1.2.0"]
build-backend = "poetry.core.masonry.api"

@@ -30,6 +30,7 @@ from dangerzone.isolation_provider.container import (
    NoContainerTechException,
    NotAvailableContainerTechException,
)
from dangerzone.isolation_provider.dummy import Dummy

from .test_updater import assert_report_equal, default_updater_settings

@@ -510,9 +511,9 @@ def test_not_available_container_tech_exception(
) -> None:
    # Setup
    mock_app = mocker.MagicMock()
    dummy = mocker.MagicMock()

    dummy.is_runtime_available.side_effect = NotAvailableContainerTechException(
    dummy = Dummy()
    fn = mocker.patch.object(dummy, "is_runtime_available")
    fn.side_effect = NotAvailableContainerTechException(
        "podman", "podman image ls logs"
    )

@@ -69,10 +69,11 @@ class TestContainer(IsolationProviderTest):
                "image",
                "list",
                "--format",
                "{{.ID}}",
                "json",
                "dangerzone.rocks/dangerzone",
            ],
            occurrences=2,
            stdout="{}",
        )

        # Make podman load fail

@@ -102,10 +103,11 @@ class TestContainer(IsolationProviderTest):
                "image",
                "list",
                "--format",
                "{{.ID}}",
                "json",
                "dangerzone.rocks/dangerzone",
            ],
            occurrences=2,
            stdout="{}",
        )

        # Patch gzip.open and podman load so that it works