Fix references to container.tar.gz
Find all references to the `container.tar.gz` file and replace them with references to `container.tar`. Also remove the `--no-save` argument of `build-image.py`, since we now always save the image. Finally, remove some stale references to Poetry, which are no longer necessary.
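For reference, the practical effect on the image-loading path is that the uncompressed tarball can now be handed straight to the container runtime, with no gzip streaming step in between. A minimal sketch of that idea in Python (the `runtime` and `tarball_path` names are illustrative, not the project's exact API):

```python
import subprocess

def load_plain_tarball(runtime: str, tarball_path: str) -> None:
    # An uncompressed image tarball can be fed directly to
    # `podman load` / `docker load` as stdin; no gzip decompression needed.
    with open(tarball_path, "rb") as f:
        subprocess.run([runtime, "load"], stdin=f, capture_output=True, check=True)
```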
parent 69234507c4
commit 51f432be6b
12 changed files with 72 additions and 89 deletions
.github/workflows/build.yml (13 lines changed)

@@ -87,19 +87,12 @@ jobs:
         id: cache-container-image
         uses: actions/cache@v4
         with:
-          key: v4-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }}
+          key: v5-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }}
           path: |
-            share/container.tar.gz
+            share/container.tar
             share/image-id.txt

-      - name: Build and push Dangerzone image
+      - name: Build Dangerzone image
         if: ${{ steps.cache-container-image.outputs.cache-hit != 'true' }}
         run: |
-          sudo apt-get install -y python3-poetry
           python3 ./install/common/build-image.py
-          echo ${{ github.token }} | podman login ghcr.io -u USERNAME --password-stdin
-          gunzip -c share/container.tar.gz | podman load
-          tag=$(cat share/image-id.txt)
-          podman push \
-            dangerzone.rocks/dangerzone:$tag \
-            ${{ env.IMAGE_REGISTRY }}/dangerzone/dangerzone:tag
.github/workflows/ci.yml (20 lines changed)

@@ -59,9 +59,9 @@ jobs:
         id: cache-container-image
         uses: actions/cache@v4
         with:
-          key: v4-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }}
+          key: v5-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }}
           path: |-
-            share/container.tar.gz
+            share/container.tar
             share/image-id.txt

       - name: Build Dangerzone container image
@@ -72,8 +72,8 @@ jobs:
       - name: Upload container image
         uses: actions/upload-artifact@v4
         with:
-          name: container.tar.gz
-          path: share/container.tar.gz
+          name: container.tar
+          path: share/container.tar

   download-tessdata:
     name: Download and cache Tesseract data
@@ -226,9 +226,9 @@ jobs:
       - name: Restore container cache
         uses: actions/cache/restore@v4
         with:
-          key: v4-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }}
+          key: v5-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }}
           path: |-
-            share/container.tar.gz
+            share/container.tar
             share/image-id.txt
           fail-on-cache-miss: true

@@ -333,9 +333,9 @@ jobs:
       - name: Restore container image
         uses: actions/cache/restore@v4
         with:
-          key: v4-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }}
+          key: v5-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }}
           path: |-
-            share/container.tar.gz
+            share/container.tar
             share/image-id.txt
           fail-on-cache-miss: true

@@ -430,9 +430,9 @@ jobs:
       - name: Restore container image
         uses: actions/cache/restore@v4
         with:
-          key: v4-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }}
+          key: v5-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }}
           path: |-
-            share/container.tar.gz
+            share/container.tar
             share/image-id.txt
           fail-on-cache-miss: true
.github/workflows/scan.yml (17 lines changed)

@@ -21,19 +21,12 @@ jobs:
         uses: actions/checkout@v4
         with:
           fetch-depth: 0
-      - name: Install container build dependencies
-        run: |
-          sudo apt install pipx
-          pipx install poetry
-          pipx inject poetry poetry-plugin-export
-          poetry install --only package
-      - name: Bump date of Debian snapshot archive
-        run: |
-          date=$(date "+%Y%m%d")
-          sed -i "s/DEBIAN_ARCHIVE_DATE=[0-9]\+/DEBIAN_ARCHIVE_DATE=${date}/" Dockerfile.env
-          make Dockerfile
       - name: Build container image
-        run: python3 ./install/common/build-image.py --runtime docker --no-save
+        run: |
+          python3 ./install/common/build-image.py \
+            --debian-archive-date $(date "+%Y%m%d") \
+            --runtime docker
+          docker load -i share/container.tar
       - name: Get image tag
         id: tag
         run: echo "tag=$(cat share/image-id.txt)" >> $GITHUB_OUTPUT
@@ -269,7 +269,7 @@ Our [GitHub Releases page](https://github.com/freedomofpress/dangerzone/releases
 hosts the following files:
 * Windows installer (`Dangerzone-<version>.msi`)
 * macOS archives (`Dangerzone-<version>-<arch>.dmg`)
-* Container images (`container-<version>-<arch>.tar.gz`)
+* Container images (`container-<version>-<arch>.tar`)
 * Source package (`dangerzone-<version>.tar.gz`)

 All these files are accompanied by signatures (as `.asc` files). We'll explain
@@ -297,7 +297,7 @@ gpg --verify Dangerzone-0.6.1-i686.dmg.asc Dangerzone-0.6.1-i686.dmg
 For the container images:

 ```
-gpg --verify container-0.6.1-i686.tar.gz.asc container-0.6.1-i686.tar.gz
+gpg --verify container-0.6.1-i686.tar.asc container-0.6.1-i686.tar
 ```

 For the source package:

@@ -150,7 +150,7 @@ Here is what you need to do:
   poetry run ./install/common/download-tessdata.py

   # Copy the container image to the assets folder
-  cp share/container.tar.gz ~dz/release-assets/$VERSION/dangerzone-$VERSION-arm64.tar.gz
+  cp share/container.tar ~dz/release-assets/$VERSION/dangerzone-$VERSION-arm64.tar
   cp share/image-id.txt ~dz/release-assets/$VERSION/.
   ```

@@ -227,7 +227,7 @@ The Windows release is performed in a Windows 11 virtual machine (as opposed to

 - [ ] Copy the container image into the VM
   > [!IMPORTANT]
-  > Instead of running `python .\install\windows\build-image.py` in the VM, run the build image script on the host (making sure to build for `linux/amd64`). Copy `share/container.tar.gz` and `share/image-id.txt` from the host into the `share` folder in the VM.
+  > Instead of running `python .\install\windows\build-image.py` in the VM, run the build image script on the host (making sure to build for `linux/amd64`). Copy `share/container.tar` and `share/image-id.txt` from the host into the `share` folder in the VM.
 - [ ] Run `poetry run .\install\windows\build-app.bat`
 - [ ] When you're done you will have `dist\Dangerzone.msi`

@@ -318,9 +318,8 @@ To publish the release, you can follow these steps:

 - [ ] Run container scan on the produced container images (some time may have passed since the artifacts were built)
   ```bash
-  gunzip --keep -c ./share/container.tar.gz > /tmp/container.tar
   docker pull anchore/grype:latest
-  docker run --rm -v /tmp/container.tar:/container.tar anchore/grype:latest /container.tar
+  docker run --rm -v ./share/container.tar:/container.tar anchore/grype:latest /container.tar
   ```

 - [ ] Collect the assets in a single directory, calculate their SHA-256 hashes, and sign them.
@@ -1,4 +1,3 @@
-import gzip
 import logging
 import platform
 import shutil

@@ -120,30 +119,23 @@ def get_expected_tag() -> str:

 def load_image_tarball() -> None:
     log.info("Installing Dangerzone container image...")
-    p = subprocess.Popen(
-        [get_runtime(), "load"],
-        stdin=subprocess.PIPE,
-        startupinfo=get_subprocess_startupinfo(),
-    )
-
-    chunk_size = 4 << 20
-    compressed_container_path = get_resource_path("container.tar.gz")
-    with gzip.open(compressed_container_path) as f:
-        while True:
-            chunk = f.read(chunk_size)
-            if len(chunk) > 0:
-                if p.stdin:
-                    p.stdin.write(chunk)
-            else:
-                break
-    _, err = p.communicate()
-    if p.returncode < 0:
-        if err:
-            error = err.decode()
-        else:
-            error = "No output"
-        raise errors.ImageInstallationException(
-            f"Could not install container image: {error}"
-        )
+    tarball_path = get_resource_path("container.tar")
+    with open(tarball_path) as f:
+        try:
+            subprocess.run(
+                [get_runtime(), "load"],
+                stdin=f,
+                startupinfo=get_subprocess_startupinfo(),
+                capture_output=True,
+                check=True,
+            )
+        except subprocess.CalledProcessError as e:
+            if e.stderr:
+                error = e.stderr.decode()
+            else:
+                error = "No output"
+            raise errors.ImageInstallationException(
+                f"Could not install container image: {error}"
+            )

-    log.info("Successfully installed container image from")
+    log.info("Successfully installed container image")
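A side note on the design choice in the hunk above: once the tarball is uncompressed, the manual gzip-chunking loop and return-code polling can be replaced by a single `subprocess.run(..., check=True, capture_output=True)` call, where a failed load raises `CalledProcessError` with the runtime's stderr attached. A generic sketch of that pattern (the hard-coded `podman` command and the `RuntimeError` are placeholders, not Dangerzone's own types):

```python
import subprocess

def load_or_raise(tarball_path: str) -> None:
    try:
        # check=True raises on a non-zero exit; capture_output=True keeps stderr
        subprocess.run(
            ["podman", "load", "-i", tarball_path],
            capture_output=True,
            check=True,
        )
    except subprocess.CalledProcessError as e:
        error = e.stderr.decode() if e.stderr else "No output"
        raise RuntimeError(f"Could not install container image: {error}")
```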
@@ -130,7 +130,7 @@ def is_qubes_native_conversion() -> bool:
         # This disambiguates if it is running a Qubes targetted build or not
         # (Qubes-specific builds don't ship the container image)

-        compressed_container_path = get_resource_path("container.tar.gz")
-        return not os.path.exists(compressed_container_path)
+        container_image_path = get_resource_path("container.tar")
+        return not os.path.exists(container_image_path)
     else:
         return False
@@ -11,8 +11,8 @@ log = logging.getLogger(__name__)


 DZ_ASSETS = [
-    "container-{version}-i686.tar.gz",
-    "container-{version}-arm64.tar.gz",
+    "container-{version}-i686.tar",
+    "container-{version}-arm64.tar",
     "Dangerzone-{version}.msi",
     "Dangerzone-{version}-arm64.dmg",
     "Dangerzone-{version}-i686.dmg",
dodo.py (6 lines changed)

@@ -57,7 +57,7 @@ IMAGE_DEPS = [
     *list_files("dangerzone/container_helpers"),
     "install/common/build-image.py",
 ]
-IMAGE_TARGETS = ["share/container.tar.gz", "share/image-id.txt"]
+IMAGE_TARGETS = ["share/container.tar", "share/image-id.txt"]

 SOURCE_DEPS = [
     *list_files("assets"),
@@ -188,8 +188,8 @@ def task_download_tessdata():

 def task_build_image():
     """Build the container image using ./install/common/build-image.py"""
-    img_src = "share/container.tar.gz"
-    img_dst = RELEASE_DIR / f"container-{VERSION}-{ARCH}.tar.gz"  # FIXME: Add arch
+    img_src = "share/container.tar"
+    img_dst = RELEASE_DIR / f"container-{VERSION}-{ARCH}.tar"  # FIXME: Add arch
     img_id_src = "share/image-id.txt"
     img_id_dst = RELEASE_DIR / "image-id.txt"  # FIXME: Add arch
@@ -66,14 +66,14 @@ def build(build_dir, qubes=False):
     print("* Creating a Python sdist")
     tessdata = root / "share" / "tessdata"
     tessdata_bak = root / "tessdata.bak"
-    container_tar_gz = root / "share" / "container.tar.gz"
-    container_tar_gz_bak = root / "container.tar.gz.bak"
+    container_tar = root / "share" / "container.tar"
+    container_tar_bak = root / "container.tar.bak"

     if tessdata.exists():
         tessdata.rename(tessdata_bak)
-    stash_container = qubes and container_tar_gz.exists()
-    if stash_container and container_tar_gz.exists():
-        container_tar_gz.rename(container_tar_gz_bak)
+    stash_container = qubes and container_tar.exists()
+    if stash_container and container_tar.exists():
+        container_tar.rename(container_tar_bak)
     try:
         subprocess.run(["poetry", "build", "-f", "sdist"], cwd=root, check=True)
         # Copy and unlink the Dangerzone sdist, instead of just renaming it. If the
@@ -84,8 +84,8 @@ def build(build_dir, qubes=False):
     finally:
         if tessdata_bak.exists():
             tessdata_bak.rename(tessdata)
-        if stash_container and container_tar_gz_bak.exists():
-            container_tar_gz_bak.rename(container_tar_gz)
+        if stash_container and container_tar_bak.exists():
+            container_tar_bak.rename(container_tar)

     print("* Building RPM package")
     cmd = [
@@ -18,7 +18,7 @@
 #
 # * Qubes packages include some extra files under /etc/qubes-rpc, whereas
 #   regular RPM packages include the container image under
-#   /usr/share/container.tar.gz
+#   /usr/share/container.tar
 # * Qubes packages have some extra dependencies.
 # 3. It is best to consume this SPEC file using the `install/linux/build-rpm.py`
 #    script, which handles the necessary scaffolding for building the package.
@@ -8,6 +8,7 @@ from pytest_subprocess import FakeProcess
 from dangerzone import container_utils, errors
 from dangerzone.isolation_provider.container import Container
 from dangerzone.isolation_provider.qubes import is_qubes_native_conversion
+from dangerzone.util import get_resource_path

 from .base import IsolationProviderTermination, IsolationProviderTest

@@ -47,7 +48,7 @@ class TestContainer(IsolationProviderTest):
         provider.is_available()

     def test_install_raise_if_image_cant_be_installed(
-        self, mocker: MockerFixture, provider: Container, fp: FakeProcess
+        self, provider: Container, fp: FakeProcess
     ) -> None:
         """When an image installation fails, an exception should be raised"""

@@ -68,11 +69,13 @@ class TestContainer(IsolationProviderTest):
             occurrences=2,
         )

-        # Make podman load fail
-        mocker.patch("gzip.open", mocker.mock_open(read_data=""))
-
         fp.register_subprocess(
-            [container_utils.get_runtime(), "load"],
+            [
+                container_utils.get_runtime(),
+                "load",
+                "-i",
+                get_resource_path("container.tar"),
+            ],
             returncode=-1,
         )

@@ -80,7 +83,7 @@ class TestContainer(IsolationProviderTest):
             provider.install()

     def test_install_raises_if_still_not_installed(
-        self, mocker: MockerFixture, provider: Container, fp: FakeProcess
+        self, provider: Container, fp: FakeProcess
     ) -> None:
         """When an image keep being not installed, it should return False"""

@@ -101,10 +104,13 @@ class TestContainer(IsolationProviderTest):
             occurrences=2,
         )

-        # Patch gzip.open and podman load so that it works
-        mocker.patch("gzip.open", mocker.mock_open(read_data=""))
         fp.register_subprocess(
-            [container_utils.get_runtime(), "load"],
+            [
+                container_utils.get_runtime(),
+                "load",
+                "-i",
+                get_resource_path("container.tar"),
+            ],
         )
         with pytest.raises(errors.ImageNotPresentException):
             provider.install()
@@ -191,7 +197,7 @@ class TestContainer(IsolationProviderTest):
         reason="Linux specific",
     )
     def test_linux_skips_desktop_version_check_returns_true(
-        self, mocker: MockerFixture, provider: Container
+        self, provider: Container
     ) -> None:
         assert (True, "") == provider.check_docker_desktop_version()