mirror of https://github.com/freedomofpress/dangerzone.git (synced 2025-05-19 19:50:33 +02:00)

Compare commits: 10 commits, 2981ec4450 ... c96c0d6eed
Commits in this comparison:

- c96c0d6eed
- 69f4d296ec
- c2d37dfb04
- 60c144aab0
- ad3d0e4182
- af6b4e0d73
- 4542f0b4c4
- 10957dfe02
- 94e51840e7
- e67fbc1e72
12 changed files with 337 additions and 503 deletions
.github/workflows/multi_arch_build.yml (vendored, 162 lines changed)
```yaml
@@ -1,162 +0,0 @@
name: Multi-arch build

on:
  push:

env:
  REGISTRY: ghcr.io/${{ github.repository_owner }}
  REGISTRY_USER: ${{ github.actor }}
  REGISTRY_PASSWORD: ${{ github.token }}
  IMAGE_NAME: dangerzone/dangerzone

jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        platform:
          - linux/amd64
          - linux/arm64
    steps:
      - uses: actions/checkout@v4

      - name: Get current date
        id: date
        run: echo "date=$(date +'%Y%m%d')" >> $GITHUB_OUTPUT

      - name: Prepare
        run: |
          platform=${{ matrix.platform }}
          echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV

      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: |
            ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}

      - name: Login to GHCR
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Build and push by digest
        id: build
        uses: docker/build-push-action@v6
        with:
          context: ./dangerzone/
          file: Dockerfile
          build-args: |
            DEBIAN_ARCHIVE_DATE=${{ steps.date.outputs.date }}
          ## Remove potentially incorrect Docker provenance.
          #provenance: false
          platforms: ${{ matrix.platform }}
          labels: ${{ steps.meta.outputs.labels }}
          outputs: type=image,"name=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}",push-by-digest=true,name-canonical=true,push=true

      - name: Export digest
        run: |
          mkdir -p ${{ runner.temp }}/digests
          digest="${{ steps.build.outputs.digest }}"
          touch "${{ runner.temp }}/digests/${digest#sha256:}"

      - name: Upload digest
        uses: actions/upload-artifact@v4
        with:
          name: digests-${{ env.PLATFORM_PAIR }}
          path: ${{ runner.temp }}/digests/*
          if-no-files-found: error
          retention-days: 1

  merge:
    runs-on: ubuntu-latest
    needs:
      - build
    outputs:
      digest: ${{ steps.image.outputs.digest }}
      image: ${{ steps.image.outputs.image }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Compute image tag
        id: tag
        run: |
          DATE=$(date +'%Y%m%d')
          TAG=$(git describe --long --first-parent | tail -c +2)
          echo "tag=${DATE}-${TAG}" >> $GITHUB_OUTPUT

      - name: Download digests
        uses: actions/download-artifact@v4
        with:
          path: ${{ runner.temp }}/digests
          pattern: digests-*
          merge-multiple: true

      - name: Login to GHCR
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      #- name: Docker meta
      #  id: meta
      #  uses: docker/metadata-action@v5
      #  with:
      #    images: |
      #      ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
      #    tags: |
      #      type=ref,event=branch
      #      type=ref,event=pr
      #      type=semver,pattern={{version}}
      #      type=semver,pattern={{major}}.{{minor}}

      - name: Create manifest list and push
        working-directory: ${{ runner.temp }}/digests
        run: |
          IMAGE=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.tag.outputs.tag }}
          DIGESTS=$(printf '${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}@sha256:%s ' *)
          docker buildx imagetools create -t ${IMAGE} ${DIGESTS}

      - name: Inspect image
        id: image
        run: |
          # NOTE: Set the image as an output because the `env` context is not
          # available to the inputs of a reusable workflow call.
          image_name="${REGISTRY}/${IMAGE_NAME}"
          echo "image=$image_name" >> "$GITHUB_OUTPUT"
          docker buildx imagetools inspect ${image_name}:${{ steps.tag.outputs.tag }}
          digest=$(docker buildx imagetools inspect ${image_name}:${{ steps.tag.outputs.tag }} --format "{{json .Manifest}}" | jq -r '.digest')
          echo "digest=$digest" >> "$GITHUB_OUTPUT"

  # This step calls the container workflow to generate provenance and push it to
  # the container registry.
  provenance:
    needs:
      - merge
    permissions:
      actions: read # for detecting the Github Actions environment.
      id-token: write # for creating OIDC tokens for signing.
      packages: write # for uploading attestations.
    uses: slsa-framework/slsa-github-generator/.github/workflows/generator_container_slsa3.yml@v2.0.0
    with:
      digest: ${{ needs.merge.outputs.digest }}
      image: ${{ needs.merge.outputs.image }}
      registry-username: ${{ github.actor }}
    secrets:
      registry-password: ${{ secrets.GITHUB_TOKEN }}
```
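The deleted workflow above captures the standard per-architecture build pattern: each matrix job pushes an untagged image by digest, and the `merge` job stitches those digests into a single multi-arch manifest list with `docker buildx imagetools create`. As a rough sketch of what that manifest list amounts to (digest values are placeholders, and the mandatory per-entry `size` fields are omitted for brevity):

```python
import json

# Placeholder per-architecture manifest digests, standing in for the
# "Build and push by digest" outputs of the two matrix jobs.
digests = {
    "amd64": "sha256:aaaa...",  # hypothetical value
    "arm64": "sha256:bbbb...",  # hypothetical value
}

# An OCI image index is a small JSON document that points at the per-arch
# manifests; this is roughly what `docker buildx imagetools create` pushes
# under the final tag.
index = {
    "schemaVersion": 2,
    "mediaType": "application/vnd.oci.image.index.v1+json",
    "manifests": [
        {
            "mediaType": "application/vnd.oci.image.manifest.v1+json",
            "digest": digest,
            "platform": {"architecture": arch, "os": "linux"},
        }
        for arch, digest in digests.items()
    ],
}
print(json.dumps(index, indent=2))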
.github/workflows/release-container-image.yml (vendored, 183 lines changed)
```yaml
@@ -1,17 +1,13 @@
name: Release container image
on:
  push:
    tags:
      - "container-image/**"
    branches:
      - "test/image-**"
  workflow_dispatch:
name: Release multi-arch container image

permissions:
  id-token: write
  packages: write
  contents: read
  attestations: write
on:
  workflow_dispatch:
  push:
    branches:
      - main
      - "test/**"
  schedule:
    - cron: "0 0 * * *" # Run every day at 00:00 UTC.

env:
  REGISTRY: ghcr.io/${{ github.repository_owner }}

@@ -20,38 +16,153 @@ env:
  IMAGE_NAME: dangerzone/dangerzone

jobs:
  build-container-image:
    runs-on: ubuntu-24.04
  build:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        platform:
          - linux/amd64
          - linux/arm64
    steps:
      - uses: actions/checkout@v4

      - name: Get current date
        id: date
        run: echo "date=$(date +'%Y%m%d')" >> $GITHUB_OUTPUT

      - name: Prepare
        run: |
          platform=${{ matrix.platform }}
          echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV

      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: |
            ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}

      - name: Login to GHCR
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Build and push by digest
        id: build
        uses: docker/build-push-action@v6
        with:
          context: ./dangerzone/
          file: Dockerfile
          build-args: |
            DEBIAN_ARCHIVE_DATE=${{ steps.date.outputs.date }}
          ## Remove potentially incorrect Docker provenance.
          #provenance: false
          platforms: ${{ matrix.platform }}
          labels: ${{ steps.meta.outputs.labels }}
          outputs: type=image,"name=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}",push-by-digest=true,name-canonical=true,push=true

      - name: Export digest
        run: |
          mkdir -p ${{ runner.temp }}/digests
          digest="${{ steps.build.outputs.digest }}"
          touch "${{ runner.temp }}/digests/${digest#sha256:}"

      - name: Upload digest
        uses: actions/upload-artifact@v4
        with:
          name: digests-${{ env.PLATFORM_PAIR }}
          path: ${{ runner.temp }}/digests/*
          if-no-files-found: error
          retention-days: 1

  merge:
    runs-on: ubuntu-latest
    needs:
      - build
    outputs:
      digest: ${{ steps.image.outputs.digest }}
      image: ${{ steps.image.outputs.image }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Login to GitHub Container Registry
      - name: Compute image tag
        id: tag
        run: |
          DATE=$(date +'%Y%m%d')
          TAG=$(git describe --long --first-parent | tail -c +2)
          echo "tag=${DATE}-${TAG}" >> $GITHUB_OUTPUT

      - name: Download digests
        uses: actions/download-artifact@v4
        with:
          path: ${{ runner.temp }}/digests
          pattern: digests-*
          merge-multiple: true

      - name: Login to GHCR
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: USERNAME
          password: ${{ github.token }}
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build and push the dangerzone image
        id: build-image
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      #- name: Docker meta
      #  id: meta
      #  uses: docker/metadata-action@v5
      #  with:
      #    images: |
      #      ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
      #    tags: |
      #      type=ref,event=branch
      #      type=ref,event=pr
      #      type=semver,pattern={{version}}
      #      type=semver,pattern={{major}}.{{minor}}

      - name: Create manifest list and push
        working-directory: ${{ runner.temp }}/digests
        run: |
          sudo apt-get install -y python3-poetry
          python3 ./install/common/build-image.py
          echo ${{ github.token }} | podman login ghcr.io -u USERNAME --password-stdin
          IMAGE=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.tag.outputs.tag }}
          DIGESTS=$(printf '${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}@sha256:%s ' *)
          docker buildx imagetools create -t ${IMAGE} ${DIGESTS}

          # Load the image with the final name directly
          gunzip -c share/container.tar.gz | podman load
          FINAL_IMAGE_NAME="${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}"
          TAG=$(git describe --long --first-parent | tail -c +2)
          podman tag dangerzone.rocks/dangerzone:$TAG "$FINAL_IMAGE_NAME"
          podman push "$FINAL_IMAGE_NAME" --digestfile=digest
          echo "digest=$(cat digest)" >> "$GITHUB_OUTPUT"
      - name: Inspect image
        id: image
        run: |
          # NOTE: Set the image as an output because the `env` context is not
          # available to the inputs of a reusable workflow call.
          image_name="${REGISTRY}/${IMAGE_NAME}"
          echo "image=$image_name" >> "$GITHUB_OUTPUT"
          docker buildx imagetools inspect ${image_name}:${{ steps.tag.outputs.tag }}
          digest=$(docker buildx imagetools inspect ${image_name}:${{ steps.tag.outputs.tag }} --format "{{json .Manifest}}" | jq -r '.digest')
          echo "digest=$digest" >> "$GITHUB_OUTPUT"

      - name: Generate artifact attestation
        uses: actions/attest-build-provenance@v1
  # This step calls the container workflow to generate provenance and push it to
  # the container registry.
  provenance:
    needs:
      - merge
    permissions:
      actions: read # for detecting the Github Actions environment.
      id-token: write # for creating OIDC tokens for signing.
      packages: write # for uploading attestations.
    uses: slsa-framework/slsa-github-generator/.github/workflows/generator_container_slsa3.yml@v2.0.0
    with:
      subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
      subject-digest: "${{ steps.build-image.outputs.digest }}"
      push-to-registry: true
      digest: ${{ needs.merge.outputs.digest }}
      image: ${{ needs.merge.outputs.image }}
      registry-username: ${{ github.actor }}
    secrets:
      registry-password: ${{ secrets.GITHUB_TOKEN }}
```
```python
@@ -8,7 +8,8 @@ from typing import List, Optional, Tuple
from . import errors
from .util import get_resource_path, get_subprocess_startupinfo

CONTAINER_NAME = "dangerzone.rocks/dangerzone"
OLD_CONTAINER_NAME = "dangerzone.rocks/dangerzone"
CONTAINER_NAME = "ghcr.io/almet/dangerzone/dangerzone"

log = logging.getLogger(__name__)

@@ -110,12 +111,6 @@ def delete_image_tag(tag: str) -> None:
    )


def get_expected_tag() -> str:
    """Get the tag of the Dangerzone image tarball from the image-id.txt file."""
    with open(get_resource_path("image-id.txt")) as f:
        return f.read().strip()


def load_image_tarball_in_memory() -> None:
    log.info("Installing Dangerzone container image...")
    p = subprocess.Popen(

@@ -192,13 +187,22 @@ def container_pull(image: str) -> bool:
    return process.returncode == 0


def get_local_image_hash(image: str) -> Optional[str]:
def get_local_image_digest(image: str) -> Optional[str]:
    """
    Returns a image hash from a local image name
    """
    cmd = [get_runtime_name(), "image", "inspect", image, "-f", "{{.Digest}}"]
    # Get the image hash from the podman images command, as
    # podman inspect returns a the digest of the architecture-bound image
    cmd = [get_runtime_name(), "images", image, "--format", "{{.Digest}}"]
    log.debug(" ".join(cmd))
    try:
        result = subprocess.run(cmd, capture_output=True, check=True)
        lines = result.stdout.decode().strip().split("\n")
        if len(lines) != 1:
            raise errors.MultipleImagesFoundException(
                f"Expected a single line of output, got {len(lines)} lines"
            )
        return lines[0].replace("sha256:", "")
    except subprocess.CalledProcessError as e:
        return None
    else:
```
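The comment in `get_local_image_digest()` is the crux of this hunk: for an image pulled from a multi-arch manifest list, `podman image inspect` reports the digest of the architecture-specific manifest, while `podman images --format '{{.Digest}}'` reports the digest of the manifest list itself, which is what the signatures refer to. A minimal standalone sketch of the two calls (the image name is illustrative):

```python
import subprocess

def podman_digest(cmd: list) -> str:
    """Run a podman command that prints a digest and return it stripped."""
    out = subprocess.run(cmd, capture_output=True, check=True).stdout
    return out.decode().strip()

image = "ghcr.io/freedomofpress/dangerzone/dangerzone"  # illustrative
# Digest of the multi-arch manifest list (what this module wants):
list_digest = podman_digest(["podman", "images", image, "--format", "{{.Digest}}"])
# Digest of the architecture-bound manifest (what inspect would report):
arch_digest = podman_digest(["podman", "image", "inspect", image, "-f", "{{.Digest}}"])
print(list_digest, arch_digest)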
```python
@@ -126,6 +126,10 @@ class ImageNotPresentException(Exception):
    pass


class MultipleImagesFoundException(Exception):
    pass


class ImageInstallationException(Exception):
    pass
```
```python
@@ -5,7 +5,7 @@ import shlex
import subprocess
from typing import List, Tuple

from .. import container_utils, errors
from .. import container_utils, errors, updater
from ..document import Document
from ..util import get_resource_path, get_subprocess_startupinfo
from .base import IsolationProvider, terminate_process_group

@@ -78,40 +78,22 @@ class Container(IsolationProvider):

    @staticmethod
    def install() -> bool:
        """Install the container image tarball, or verify that it's already installed.
        """Check if an update is available and install it if necessary."""
        # XXX Do this only if users have optted in to auto-updates

        Perform the following actions:
        1. Get the tags of any locally available images that match Dangerzone's image
           name.
        2. Get the expected image tag from the image-id.txt file.
           - If this tag is present in the local images, then we can return.
           - Else, prune the older container images and continue.
        3. Load the image tarball and make sure it matches the expected tag.
        """
        old_tags = container_utils.list_image_tags()
        expected_tag = container_utils.get_expected_tag()

        if expected_tag not in old_tags:
            # Prune older container images.
            log.info(
                f"Could not find a Dangerzone container image with tag '{expected_tag}'"
        # # Load the image tarball into the container runtime.
        update_available, image_digest = updater.is_update_available(
            container_utils.CONTAINER_NAME
        )
        if update_available:
            updater.upgrade_container_image(
                container_utils.CONTAINER_NAME,
                image_digest,
                updater.DEFAULT_PUBKEY_LOCATION,
            )
            for tag in old_tags:
                container_utils.delete_image_tag(tag)
        else:
            return True

        # Load the image tarball into the container runtime.
        container_utils.load_image_tarball_in_memory()

        # Check that the container image has the expected image tag.
        # See https://github.com/freedomofpress/dangerzone/issues/988 for an example
        # where this was not the case.
        new_tags = container_utils.list_image_tags()
        if expected_tag not in new_tags:
            raise errors.ImageNotPresentException(
                f"Could not find expected tag '{expected_tag}' after loading the"
                " container image tarball"
        updater.verify_local_image(
            container_utils.CONTAINER_NAME, updater.DEFAULT_PUBKEY_LOCATION
        )

        return True

@@ -193,6 +175,13 @@ class Container(IsolationProvider):
        name: str,
    ) -> subprocess.Popen:
        container_runtime = container_utils.get_runtime()

        image_digest = container_utils.get_local_image_digest(
            container_utils.CONTAINER_NAME
        )
        updater.verify_local_image(
            container_utils.CONTAINER_NAME, updater.DEFAULT_PUBKEY_LOCATION
        )
        security_args = self.get_runtime_security_args()
        debug_args = []
        if self.debug:

@@ -201,9 +190,7 @@ class Container(IsolationProvider):
        enable_stdin = ["-i"]
        set_name = ["--name", name]
        prevent_leakage_args = ["--rm"]
        image_name = [
            container_utils.CONTAINER_NAME + ":" + container_utils.get_expected_tag()
        ]
        image_name = [container_utils.CONTAINER_NAME + "@sha256:" + image_digest]
        args = (
            ["run"]
            + security_args
```
```python
@@ -1,189 +0,0 @@
import gzip
import logging
import platform
import shutil
import subprocess
from typing import List, Optional, Tuple

from . import errors
from .util import get_resource_path, get_subprocess_startupinfo

CONTAINER_NAME = "dangerzone.rocks/dangerzone"

log = logging.getLogger(__name__)


def get_runtime_name() -> str:
    if platform.system() == "Linux":
        return "podman"
    # Windows, Darwin, and unknown use docker for now, dangerzone-vm eventually
    return "docker"


def get_runtime_version() -> Tuple[int, int]:
    """Get the major/minor parts of the Docker/Podman version.

    Some of the operations we perform in this module rely on some Podman features
    that are not available across all of our platforms. In order to have a proper
    fallback, we need to know the Podman version. More specifically, we're fine with
    just knowing the major and minor version, since writing/installing a full-blown
    semver parser is an overkill.
    """
    # Get the Docker/Podman version, using a Go template.
    runtime = get_runtime_name()
    if runtime == "podman":
        query = "{{.Client.Version}}"
    else:
        query = "{{.Server.Version}}"

    cmd = [runtime, "version", "-f", query]
    try:
        version = subprocess.run(
            cmd,
            startupinfo=get_subprocess_startupinfo(),
            capture_output=True,
            check=True,
        ).stdout.decode()
    except Exception as e:
        msg = f"Could not get the version of the {runtime.capitalize()} tool: {e}"
        raise RuntimeError(msg) from e

    # Parse this version and return the major/minor parts, since we don't need the
    # rest.
    try:
        major, minor, _ = version.split(".", 3)
        return (int(major), int(minor))
    except Exception as e:
        msg = (
            f"Could not parse the version of the {runtime.capitalize()} tool"
            f" (found: '{version}') due to the following error: {e}"
        )
        raise RuntimeError(msg)


def get_runtime() -> str:
    container_tech = get_runtime_name()
    runtime = shutil.which(container_tech)
    if runtime is None:
        raise errors.NoContainerTechException(container_tech)
    return runtime


def list_image_tags() -> List[str]:
    """Get the tags of all loaded Dangerzone images.

    This method returns a mapping of image tags to image IDs, for all Dangerzone
    images. This can be useful when we want to find which are the local image tags,
    and which image ID does the "latest" tag point to.
    """
    return (
        subprocess.check_output(
            [
                get_runtime(),
                "image",
                "list",
                "--format",
                "{{ .Tag }}",
                CONTAINER_NAME,
            ],
            text=True,
            startupinfo=get_subprocess_startupinfo(),
        )
        .strip()
        .split()
    )


def delete_image_tag(tag: str) -> None:
    """Delete a Dangerzone image tag."""
    name = CONTAINER_NAME + ":" + tag
    log.warning(f"Deleting old container image: {name}")
    try:
        subprocess.check_output(
            [get_runtime(), "rmi", "--force", name],
            startupinfo=get_subprocess_startupinfo(),
        )
    except Exception as e:
        log.warning(
            f"Couldn't delete old container image '{name}', so leaving it there."
            f" Original error: {e}"
        )


def get_expected_tag() -> str:
    """Get the tag of the Dangerzone image tarball from the image-id.txt file."""
    with open(get_resource_path("image-id.txt")) as f:
        return f.read().strip()


def tag_image_by_digest(digest: str, tag: str) -> None:
    image_id = get_image_id_by_digest(digest)
    cmd = [get_runtime(), "tag", image_id, tag]
    subprocess.run(cmd, startupinfo=get_subprocess_startupinfo(), check=True)


def get_image_id_by_digest(digest: str) -> str:
    cmd = [
        get_runtime(),
        "image",
        "tag",
        "-f",
        f'digest="{digest}"',
        "--format ",
        "{{.Id}}",
    ]
    process = subprocess.run(
        cmd, startupinfo=get_subprocess_startupinfo(), check=True, capture_output=True
    )
    return process.stdout.decode().strip()


def load_image_tarball_in_memory(
    compressed_container_path: Optional[str] = None,
) -> None:
    if compressed_container_path is None:
        compressed_container_path = get_resource_path("container.tar.gz")

    log.info("Installing Dangerzone container image...")
    p = subprocess.Popen(
        [get_runtime(), "load"],
        stdin=subprocess.PIPE,
        startupinfo=get_subprocess_startupinfo(),
    )

    chunk_size = 4 << 20

    with gzip.open(compressed_container_path) as f:
        while True:
            chunk = f.read(chunk_size)
            if len(chunk) > 0:
                if p.stdin:
                    p.stdin.write(chunk)
            else:
                break
    _, err = p.communicate()
    if p.returncode < 0:
        if err:
            error = err.decode()
        else:
            error = "No output"
        raise errors.ImageInstallationException(
            f"Could not install container image: {error}"
        )

    log.info("Successfully installed container image from")


def load_image_tarball_file(container_path: str) -> None:
    cmd = [get_runtime(), "load", "-i", container_path]
    subprocess.run(cmd, startupinfo=get_subprocess_startupinfo(), check=True)

    log.info("Successfully installed container image from %s", container_path)


def container_pull(image: str) -> bool:
    # XXX - Move to container_utils.py
    cmd = [get_runtime_name(), "pull", f"{image}"]
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    process.communicate()
    return process.returncode == 0
```
```python
@@ -1,3 +1,10 @@
import logging

log = logging.getLogger(__name__)

from .signatures import (
    DEFAULT_PUBKEY_LOCATION,
    is_update_available,
    upgrade_container_image,
    verify_local_image,
)
```
```python
@@ -4,13 +4,11 @@ import logging

import click

from ..util import get_resource_path
from . import attestations, errors, log, registry, signatures

DEFAULT_REPOSITORY = "freedomofpress/dangerzone"
DEFAULT_BRANCH = "main"
DEFAULT_IMAGE_NAME = "ghcr.io/freedomofpress/dangerzone/dangerzone"
PUBKEY_DEFAULT_LOCATION = get_resource_path("freedomofpress-dangerzone-pub.key")


@click.group()

@@ -26,12 +24,12 @@ def main(debug: bool) -> None:

@main.command()
@click.argument("image", default=DEFAULT_IMAGE_NAME)
@click.option("--pubkey", default=PUBKEY_DEFAULT_LOCATION)
@click.option("--pubkey", default=signatures.DEFAULT_PUBKEY_LOCATION)
def upgrade(image: str, pubkey: str) -> None:
    """Upgrade the image to the latest signed version."""
    manifest_hash = registry.get_manifest_hash(image)
    manifest_digest = registry.get_manifest_digest(image)
    try:
        is_upgraded = signatures.upgrade_container_image(image, manifest_hash, pubkey)
        is_upgraded = signatures.upgrade_container_image(image, manifest_digest, pubkey)
        if is_upgraded:
            click.echo(f"✅ The local image {image} has been upgraded")
            click.echo(f"✅ The image has been signed with {pubkey}")
```
```python
@@ -1,6 +1,6 @@
import hashlib
import re
from collections import namedtuple
from hashlib import sha256
from typing import Dict, Optional, Tuple

import requests

@@ -8,14 +8,24 @@ import requests
from . import errors, log

__all__ = [
    "get_manifest_hash",
    "get_manifest_digest",
    "list_tags",
    "get_manifest",
    "parse_image_location",
]

SIGSTORE_BUNDLE = "application/vnd.dev.sigstore.bundle.v0.3+json"
ACCEPT_MANIFESTS_HEADER="application/vnd.docker.distribution.manifest.v1+json,application/vnd.docker.distribution.manifest.v1+prettyjws,application/vnd.docker.distribution.manifest.v2+json,application/vnd.oci.image.manifest.v1+json,application/vnd.docker.distribution.manifest.list.v2+json,application/vnd.oci.image.index.v1+json"
IMAGE_INDEX_MEDIA_TYPE = "application/vnd.oci.image.index.v1+json"
ACCEPT_MANIFESTS_HEADER = ",".join(
    [
        "application/vnd.docker.distribution.manifest.v1+json",
        "application/vnd.docker.distribution.manifest.v1+prettyjws",
        "application/vnd.docker.distribution.manifest.v2+json",
        "application/vnd.oci.image.manifest.v1+json",
        "application/vnd.docker.distribution.manifest.list.v2+json",
        IMAGE_INDEX_MEDIA_TYPE,
    ]
)


class Image(namedtuple("Image", ["registry", "namespace", "image_name", "tag"])):

@@ -89,7 +99,8 @@ class RegistryClient:
        return tags

    def get_manifest(
        self, tag: str,
        self,
        tag: str,
    ) -> requests.Response:
        """Get manifest information for a specific tag"""
        manifest_url = f"{self._image_url}/manifests/{tag}"

@@ -111,8 +122,8 @@ class RegistryClient:
            .get("manifests")
        )

    def get_blob(self, hash: str) -> requests.Response:
        url = f"{self._image_url}/blobs/{hash}"
    def get_blob(self, digest: str) -> requests.Response:
        url = f"{self._image_url}/blobs/{digest}"
        response = requests.get(
            url,
            headers={

@@ -122,17 +133,19 @@ class RegistryClient:
        response.raise_for_status()
        return response

    def get_manifest_hash(
    def get_manifest_digest(
        self, tag: str, tag_manifest_content: Optional[bytes] = None
    ) -> str:
        if not tag_manifest_content:
            tag_manifest_content = self.get_manifest(tag).content

        return hashlib.sha256(tag_manifest_content).hexdigest()
        return sha256(tag_manifest_content).hexdigest()


def get_manifest_hash(image_str: str) -> str:

# XXX Refactor this with regular functions rather than a class
def get_manifest_digest(image_str: str) -> str:
    image = parse_image_location(image_str)
    return RegistryClient(image).get_manifest_hash(image.tag)
    return RegistryClient(image).get_manifest_digest(image.tag)


def list_tags(image_str: str) -> list:
```
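The `get_manifest_digest()` rename also makes the mechanics plain: a registry's manifest digest is nothing more than the SHA-256 of the raw manifest bytes. A rough standalone check of that property (the URL is illustrative, and the anonymous token exchange that `RegistryClient` performs against ghcr.io is skipped here):

```python
import hashlib

import requests

# Illustrative values; a real request to ghcr.io also needs a bearer token.
url = "https://registry.example.com/v2/dangerzone/dangerzone/manifests/latest"
resp = requests.get(
    url, headers={"Accept": "application/vnd.oci.image.index.v1+json"}
)
resp.raise_for_status()

# The digest the registry advertises is the SHA-256 of the manifest bytes,
# which is exactly what RegistryClient.get_manifest_digest() computes.
digest = hashlib.sha256(resp.content).hexdigest()
assert resp.headers["Docker-Content-Digest"] == f"sha256:{digest}"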
```python
@@ -11,6 +11,7 @@ from tempfile import NamedTemporaryFile, TemporaryDirectory
from typing import Dict, List, Optional, Tuple

from .. import container_utils as runtime
from ..util import get_resource_path
from . import cosign, errors, log, registry

try:

@@ -24,6 +25,7 @@ def get_config_dir() -> Path:


# XXX Store this somewhere else.
DEFAULT_PUBKEY_LOCATION = get_resource_path("freedomofpress-dangerzone-pub.key")
SIGNATURES_PATH = get_config_dir() / "signatures"
__all__ = [
    "verify_signature",

@@ -55,7 +57,7 @@ def signature_to_bundle(sig: Dict) -> Dict:
    }


def verify_signature(signature: dict, image_hash: str, pubkey: str) -> bool:
def verify_signature(signature: dict, image_digest: str, pubkey: str) -> bool:
    """Verify a signature against a given public key"""
    # XXX - Also verfy the identity/docker-reference field against the expected value
    # e.g. ghcr.io/freedomofpress/dangerzone/dangerzone

@@ -64,12 +66,12 @@ def verify_signature(signature: dict, image_hash: str, pubkey: str) -> bool:
    signature_bundle = signature_to_bundle(signature)

    payload_bytes = b64decode(signature_bundle["Payload"])
    payload_hash = json.loads(payload_bytes)["critical"]["image"][
    payload_digest = json.loads(payload_bytes)["critical"]["image"][
        "docker-manifest-digest"
    ]
    if payload_hash != f"sha256:{image_hash}":
    if payload_digest != f"sha256:{image_digest}":
        raise errors.SignatureMismatch(
            f"The signature does not match the image hash ({payload_hash}, {image_hash})"
            f"The signature does not match the image digest ({payload_digest}, {image_digest})"
        )

    with (

@@ -103,44 +105,48 @@ def verify_signature(signature: dict, image_hash: str, pubkey: str) -> bool:
    return False


def new_image_release(image: str) -> bool:
    remote_hash = registry.get_manifest_hash(image)
    local_hash = runtime.get_local_image_hash(image)
    log.debug("Remote hash: %s", remote_hash)
    log.debug("Local hash: %s", local_hash)
    return remote_hash != local_hash
def is_update_available(image: str) -> (bool, Optional[str]):
    remote_digest = registry.get_manifest_digest(image)
    local_digest = runtime.get_local_image_digest(image)
    log.debug("Remote digest: %s", remote_digest)
    log.debug("Local digest: %s", local_digest)
    has_update = remote_digest != local_digest
    if has_update:
        return True, remote_digest
    return False, None


def verify_signatures(
    signatures: List[Dict],
    image_hash: str,
    image_digest: str,
    pubkey: str,
) -> bool:
    for signature in signatures:
        if not verify_signature(signature, image_hash, pubkey):
        if not verify_signature(signature, image_digest, pubkey):
            raise errors.SignatureVerificationError()
    return True


def upgrade_container_image(image: str, manifest_hash: str, pubkey: str) -> bool:
def upgrade_container_image(image: str, manifest_digest: str, pubkey: str) -> bool:
    """Verify and upgrade the image to the latest, if signed."""
    if not new_image_release(image):
    update_available, _ = is_update_available(image)
    if not update_available:
        raise errors.ImageAlreadyUpToDate("The image is already up to date")

    signatures = get_remote_signatures(image, manifest_hash)
    verify_signatures(signatures, manifest_hash, pubkey)
    signatures = get_remote_signatures(image, manifest_digest)
    verify_signatures(signatures, manifest_digest, pubkey)

    # At this point, the signatures are verified
    # We store the signatures just now to avoid storing unverified signatures
    store_signatures(signatures, manifest_hash, pubkey)
    store_signatures(signatures, manifest_digest, pubkey)

    # let's upgrade the image
    # XXX Use the image digest here to avoid race conditions
    return runtime.container_pull(image)


def _get_blob(tmpdir: str, hash: str) -> Path:
    return Path(tmpdir) / "blobs" / "sha256" / hash.replace("sha256:", "")
def _get_blob(tmpdir: str, digest: str) -> Path:
    return Path(tmpdir) / "blobs" / "sha256" / digest.replace("sha256:", "")


def upgrade_container_image_airgapped(container_tar: str, pubkey: str) -> str:

@@ -181,7 +187,8 @@ def upgrade_container_image_airgapped(container_tar: str, pubkey: str) -> str:
    index_json["manifests"] = [
        manifest
        for manifest in index_json["manifests"]
        if manifest["annotations"].get("kind") != "dev.cosignproject.cosign/sigs"
        if manifest["annotations"].get("kind")
        in ("dev.cosignproject.cosign/imageIndex", "dev.cosignproject.cosign/image")
    ]

    with open(signature_filename, "rb") as f:

@@ -240,8 +247,8 @@ def convert_oci_images_signatures(
    return image_name, signatures


def get_file_hash(file: Optional[str] = None, content: Optional[bytes] = None) -> str:
    """Get the sha256 hash of a file or content"""
def get_file_digest(file: Optional[str] = None, content: Optional[bytes] = None) -> str:
    """Get the sha256 digest of a file or content"""
    if not file and not content:
        raise errors.UpdaterError("No file or content provided")
    if file:

@@ -252,13 +259,13 @@ def get_file_hash(file: Optional[str] = None, content: Optional[bytes] = None) -
    return ""


def load_signatures(image_hash: str, pubkey: str) -> List[Dict]:
def load_signatures(image_digest: str, pubkey: str) -> List[Dict]:
    """
    Load signatures from the local filesystem

    See store_signatures() for the expected format.
    """
    pubkey_signatures = SIGNATURES_PATH / get_file_hash(pubkey)
    pubkey_signatures = SIGNATURES_PATH / get_file_digest(pubkey)
    if not pubkey_signatures.exists():
        msg = (
            f"Cannot find a '{pubkey_signatures}' folder."

@@ -266,19 +273,19 @@ def load_signatures(image_hash: str, pubkey: str) -> List[Dict]:
        )
        raise errors.SignaturesFolderDoesNotExist(msg)

    with open(pubkey_signatures / f"{image_hash}.json") as f:
    with open(pubkey_signatures / f"{image_digest}.json") as f:
        log.debug("Loading signatures from %s", f.name)
        return json.load(f)


def store_signatures(signatures: list[Dict], image_hash: str, pubkey: str) -> None:
def store_signatures(signatures: list[Dict], image_digest: str, pubkey: str) -> None:
    """
    Store signatures locally in the SIGNATURE_PATH folder, like this:

    ~/.config/dangerzone/signatures/
    └── <pubkey-hash>
        └── <image-hash>.json
        └── <image-hash>.json
    └── <pubkey-digest>
        └── <image-digest>.json
        └── <image-digest>.json

    The format used in the `.json` file is the one of `cosign download
    signature`, which differs from the "bundle" one used afterwards.

@@ -291,22 +298,22 @@ def store_signatures(signatures: list[Dict], image_hash: str, pubkey: str) -> No
        payload = json.loads(b64decode(sig["Payload"]))
        return payload["critical"]["image"]["docker-manifest-digest"]

    # All the signatures should share the same hash.
    hashes = list(map(_get_digest, signatures))
    if len(set(hashes)) != 1:
        raise errors.InvalidSignatures("Signatures do not share the same image hash")
    # All the signatures should share the same digest.
    digests = list(map(_get_digest, signatures))
    if len(set(digests)) != 1:
        raise errors.InvalidSignatures("Signatures do not share the same image digest")

    if f"sha256:{image_hash}" != hashes[0]:
    if f"sha256:{image_digest}" != digests[0]:
        raise errors.SignatureMismatch(
            f"Signatures do not match the given image hash ({image_hash}, {hashes[0]})"
            f"Signatures do not match the given image digest ({image_digest}, {digests[0]})"
        )

    pubkey_signatures = SIGNATURES_PATH / get_file_hash(pubkey)
    pubkey_signatures = SIGNATURES_PATH / get_file_digest(pubkey)
    pubkey_signatures.mkdir(parents=True, exist_ok=True)

    with open(pubkey_signatures / f"{image_hash}.json", "w") as f:
    with open(pubkey_signatures / f"{image_digest}.json", "w") as f:
        log.info(
            f"Storing signatures for {image_hash} in {pubkey_signatures}/{image_hash}.json"
            f"Storing signatures for {image_digest} in {pubkey_signatures}/{image_digest}.json"
        )
        json.dump(signatures, f)

@@ -317,28 +324,28 @@ def verify_local_image(image: str, pubkey: str) -> bool:
    """
    log.info(f"Verifying local image {image} against pubkey {pubkey}")
    try:
        image_hash = runtime.get_local_image_hash(image)
        image_digest = runtime.get_local_image_digest(image)
    except subprocess.CalledProcessError:
        raise errors.ImageNotFound(f"The image {image} does not exist locally")

    log.debug(f"Image hash: {image_hash}")
    signatures = load_signatures(image_hash, pubkey)
    log.debug(f"Image digest: {image_digest}")
    signatures = load_signatures(image_digest, pubkey)
    if len(signatures) < 1:
        raise errors.LocalSignatureNotFound("No signatures found")

    for signature in signatures:
        if not verify_signature(signature, image_hash, pubkey):
        if not verify_signature(signature, image_digest, pubkey):
            msg = f"Unable to verify signature for {image} with pubkey {pubkey}"
            raise errors.SignatureVerificationError(msg)
    return True


def get_remote_signatures(image: str, hash: str) -> List[Dict]:
def get_remote_signatures(image: str, digest: str) -> List[Dict]:
    """Retrieve the signatures from the registry, via `cosign download`."""
    cosign.ensure_installed()

    process = subprocess.run(
        ["cosign", "download", "signature", f"{image}@sha256:{hash}"],
        ["cosign", "download", "signature", f"{image}@sha256:{digest}"],
        capture_output=True,
        check=True,
    )
```
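Taken together, these hunks define the update flow that `Container.install()` now follows: compare the remote manifest digest against the local one, verify and pull on mismatch, then re-check the local image against the stored signatures. A condensed sketch of that flow using the functions from this diff (the absolute module path is an assumption; the diff only shows package-relative imports):

```python
# Module path assumed; the image name is illustrative.
from dangerzone.updater import signatures

image = "ghcr.io/freedomofpress/dangerzone/dangerzone"

update_available, remote_digest = signatures.is_update_available(image)
if update_available:
    # Verifies the remote signatures against the digest *before* pulling,
    # and stores them locally only once they check out.
    signatures.upgrade_container_image(
        image, remote_digest, signatures.DEFAULT_PUBKEY_LOCATION
    )

# Independently check the local image against the stored signatures.
signatures.verify_local_image(image, signatures.DEFAULT_PUBKEY_LOCATION)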
```python
@@ -4,6 +4,7 @@ import argparse
import hashlib
import logging
import pathlib
import platform
import stat
import subprocess
import sys

@@ -11,8 +12,20 @@ import urllib.request

logger = logging.getLogger(__name__)

DIFFOCI_URL = "https://github.com/reproducible-containers/diffoci/releases/download/v0.1.5/diffoci-v0.1.5.linux-amd64"
DIFFOCI_CHECKSUM = "01d25fe690196945a6bd510d30559338aa489c034d3a1b895a0d82a4b860698f"
DIFFOCI_VERSION = "v0.1.5"
# https://github.com/reproducible-containers/diffoci/releases/download/v0.1.5/SHA256SUMS
DIFFOCI_CHECKSUMS = """
ae171821b18c3b9e5cd1953323e79fe5ec1e972e9586474b18227b2cd052e695 diffoci-v0.1.5.darwin-amd64
fadabdac9be45fb3dfe2a53986422e53dcc6e1fdc8062713c5760e8959a37c2b diffoci-v0.1.5.darwin-arm64
01d25fe690196945a6bd510d30559338aa489c034d3a1b895a0d82a4b860698f diffoci-v0.1.5.linux-amd64
5cbc5d13b51183e2988ee0f406d428eb846d51b7c2c12ae17d0775371f43103e diffoci-v0.1.5.linux-arm-v7
2d067bd1af8a26b2c206c6bf2bde9bcb21062ddb5dc575e110e0e1a93d0d065f diffoci-v0.1.5.linux-arm64
0923f0c01f270c596fea9f84e529af958d6caba3fa0f6bf4f03df2a12f23b3fc diffoci-v0.1.5.linux-ppc64le
5821cbc299a90caa167c3a91465292907077ca1123375f88165a842b8970e710 diffoci-v0.1.5.linux-riscv64
917d7f23d2bd8fcc755cb2f722fc50ffd83389e04838c3b6e9c3463ea96a9be1 diffoci-v0.1.5.linux-s390x
"""
DIFFOCI_URL = "https://github.com/reproducible-containers/diffoci/releases/download/{version}/diffoci-{version}.{arch}"

DIFFOCI_PATH = (
    pathlib.Path.home() / ".local" / "share" / "dangerzone-dev" / "helpers" / "diffoci"
)

@@ -44,12 +57,31 @@ def git_verify(commit, source):
    )


def get_platform_arch():
    system = platform.system().lower()
    arch = platform.machine().lower()
    if arch == "x86_64":
        arch = "amd64"
    return f"{system}-{arch}"


def parse_checksums():
    lines = [
        line.replace(f"diffoci-{DIFFOCI_VERSION}.", "").split(" ")
        for line in DIFFOCI_CHECKSUMS.split("\n")
        if line
    ]
    return {arch: checksum for checksum, arch in lines}


def diffoci_hash_matches(diffoci):
    """Check if the hash of the downloaded diffoci bin matches the expected one."""
    arch = get_platform_arch()
    expected_checksum = parse_checksums().get(arch)
    m = hashlib.sha256()
    m.update(diffoci)
    diffoci_checksum = m.hexdigest()
    return diffoci_checksum == DIFFOCI_CHECKSUM
    return diffoci_checksum == expected_checksum


def diffoci_is_installed():

@@ -66,7 +98,9 @@ def diffoci_is_installed():

def diffoci_download():
    """Download the diffoci tool, based on a URL and its checksum."""
    with urllib.request.urlopen(DIFFOCI_URL) as f:
    download_url = DIFFOCI_URL.format(version=DIFFOCI_VERSION, arch=get_platform_arch())
    logger.info(f"Downloading diffoci helper from {download_url}")
    with urllib.request.urlopen(download_url) as f:
        diffoci_bin = f.read()

    if not diffoci_hash_matches(diffoci_bin):

@@ -153,7 +187,6 @@ def main():
    git_verify(commit, args.source)

    if not diffoci_is_installed():
        logger.info(f"Downloading diffoci helper from {DIFFOCI_URL}")
        diffoci_download()

    tag = f"reproduce-{commit}"
```
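The switch from a single pinned checksum to the full SHA256SUMS table is what makes the script portable: `parse_checksums()` turns the block into an `{arch: checksum}` mapping keyed by the same `system-arch` string that `get_platform_arch()` produces. A self-contained sketch of that round trip, using two rows copied from the table above:

```python
import platform

DIFFOCI_VERSION = "v0.1.5"
# Two rows copied from the SHA256SUMS block in the diff above.
DIFFOCI_CHECKSUMS = """
01d25fe690196945a6bd510d30559338aa489c034d3a1b895a0d82a4b860698f diffoci-v0.1.5.linux-amd64
2d067bd1af8a26b2c206c6bf2bde9bcb21062ddb5dc575e110e0e1a93d0d065f diffoci-v0.1.5.linux-arm64
"""

def parse_checksums() -> dict:
    # Strip the "diffoci-v0.1.5." prefix so only "linux-amd64" etc. remains.
    lines = [
        line.replace(f"diffoci-{DIFFOCI_VERSION}.", "").split(" ")
        for line in DIFFOCI_CHECKSUMS.split("\n")
        if line
    ]
    return {arch: checksum for checksum, arch in lines}

def get_platform_arch() -> str:
    system = platform.system().lower()  # e.g. "linux"
    arch = platform.machine().lower()   # e.g. "x86_64"
    if arch == "x86_64":
        arch = "amd64"
    return f"{system}-{arch}"

# Expected checksum for this host, or None for an unsupported platform.
print(parse_checksums().get(get_platform_arch()))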
@@ -1,9 +1,11 @@
# Independent Container Updates

Since version 0.9.0, Dangerzone is able to ship container images independently
from releases.
from releases of the software.

One of the main benefits of doing so is to lower the time needed to patch security issues inside the containers.
One of the main benefits of doing so is to shorten the time needed to distribute security fixes for the containers. Since the containers are where the actual conversion of documents happens, this is a way to keep Dangerzone users secure.

If you are a Dangerzone user, this all happens behind the curtain, and you should not have to know anything about it to enjoy these "in-app" updates. If you are using Dangerzone in an air-gapped environment, check the sections below.

## Checking attestations

@@ -20,7 +22,7 @@ In case of success, it will report back:

```
🎉 Successfully verified image
'ghcr.io/apyrgio/dangerzone/dangerzone:20250129-0.8.0-149-gbf2f5ac@sha256:4da441235e84e93518778827a5c5745d532d7a4079886e1647924bee7ef1c14d'
'ghcr.io/freedomofpress/dangerzone/dangerzone:20250129-0.8.0-149-gbf2f5ac@sha256:4da441235e84e93518778827a5c5745d532d7a4079886e1647924bee7ef1c14d'
and its associated claims:
- ✅ SLSA Level 3 provenance
- ✅ GitHub repo: apyrgio/dangerzone

@@ -29,6 +31,19 @@ and its associated claims:
- ✅ Git commit: bf2f5accc24bd15a4f5c869a7f0b03b8fe48dfb6
```

## Sign and publish the remote image

Once the image has been reproduced locally, we can add a signature to the container registry,
and update the `latest` tag to point to the proper hash.

```bash
cosign sign --sk ghcr.io/freedomofpress/dangerzone/dangerzone:20250129-0.8.0-149-gbf2f5ac@sha256:4da441235e84e93518778827a5c5745d532d7a4079886e1647924bee7ef1c14d

# And bump the latest tag
crane auth login ghcr.io -u USERNAME --password $(cat pat_token)
crane tag ghcr.io/freedomofpress/dangerzone/dangerzone@sha256:4da441235e84e93518778827a5c5745d532d7a4079886e1647924bee7ef1c14d latest
```

## Install updates

To check if a new container image has been released, and update your local installation with it, you can use the following commands:

@@ -37,7 +52,7 @@ To check if a new container image has been released, and update your local insta

```bash
dangerzone-image upgrade ghcr.io/almet/dangerzone/dangerzone
```

## Verify local
## Verify locally

You can verify that the image you have locally matches the stored signatures, and that these have been signed with a trusted public key:

@@ -45,9 +60,15 @@ You can verify that the image you have locally matches the stored signatures, an

```bash
dangerzone-image verify-local ghcr.io/almet/dangerzone/dangerzone
```

## Air-gapped environments
## Installing image updates to air-gapped environments

In order to make updates on an air-gapped environment, you will need to prepare an archive for the air-gapped environment. This archive will contain all the needed material to validate that the new container image has been signed and is valid.
Three steps are required:

1. Prepare the archive
2. Transfer the archive to the air-gapped system
3. Install the archive on the air-gapped system

This archive will contain all the needed material to validate that the new container image has been signed and is valid.

On the machine on which you prepare the packages:
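The commands for preparing and installing the archive are cut off by this diff, so they are not reproduced here. As a very rough sketch, the install step on the air-gapped machine maps onto the `upgrade_container_image_airgapped()` function shown earlier (the module path and archive file name below are assumptions, not commands from this diff):

```python
# Illustrative only: names below are assumptions, not part of this diff.
from dangerzone.updater import signatures  # module path assumed

image_name = signatures.upgrade_container_image_airgapped(
    "dangerzone-airgapped.tar",          # archive carried over by hand
    signatures.DEFAULT_PUBKEY_LOCATION,  # the trusted public key
)
print(f"Installed and verified {image_name}")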