From e1bdb75435825105a9a8619e77a2bbcd6fdf38d8 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexis=20M=C3=A9taireau?=
Date: Tue, 11 Feb 2025 18:13:39 +0100
Subject: [PATCH 01/24] Add a `dangerzone-image` CLI script

It contains utilities to interact with OCI registries, like getting the
list of published tags and getting the content of a manifest. It does so
via the use of the Docker Registry API v2 [0].

The script has been added to the `dev_scripts`, and is also installed on
the system under `dangerzone-image`.

[0] https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry
---
 dangerzone/updater/__init__.py |   3 +
 dangerzone/updater/cli.py      |  42 ++++++++++++
 dangerzone/updater/errors.py   |  10 +++
 dangerzone/updater/registry.py | 116 +++++++++++++++++++++++++++++++++
 dev_scripts/dangerzone-image   |  13 ++++
 pyproject.toml                 |   1 +
 6 files changed, 185 insertions(+)
 create mode 100644 dangerzone/updater/__init__.py
 create mode 100644 dangerzone/updater/cli.py
 create mode 100644 dangerzone/updater/errors.py
 create mode 100644 dangerzone/updater/registry.py
 create mode 100755 dev_scripts/dangerzone-image

diff --git a/dangerzone/updater/__init__.py b/dangerzone/updater/__init__.py
new file mode 100644
index 0000000..3988bf1
--- /dev/null
+++ b/dangerzone/updater/__init__.py
@@ -0,0 +1,3 @@
+import logging
+
+log = logging.getLogger(__name__)
diff --git a/dangerzone/updater/cli.py b/dangerzone/updater/cli.py
new file mode 100644
index 0000000..1c9f85b
--- /dev/null
+++ b/dangerzone/updater/cli.py
@@ -0,0 +1,42 @@
+#!/usr/bin/python
+
+import logging
+
+import click
+
+from . import attestations, errors, log, registry, signatures
+
+DEFAULT_REPOSITORY = "freedomofpress/dangerzone"
+DEFAULT_BRANCH = "main"
+DEFAULT_IMAGE_NAME = "ghcr.io/freedomofpress/dangerzone/dangerzone"
+
+
+@click.group()
+@click.option("--debug", is_flag=True)
+def main(debug: bool) -> None:
+    if debug:
+        click.echo("Debug mode enabled")
+        level = logging.DEBUG
+    else:
+        level = logging.INFO
+    logging.basicConfig(level=level)
+
+
+@main.command()
+@click.argument("image")
+def list_remote_tags(image: str) -> None:
+    """List the tags available for a given image."""
+    click.echo(f"Existing tags for {image}")
+    for tag in registry.list_tags(image):
+        click.echo(tag)
+
+
+@main.command()
+@click.argument("image")
+def get_manifest(image: str) -> None:
+    """Retrieves a remote manifest for a given image and displays it."""
+    click.echo(registry.get_manifest(image).content)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/dangerzone/updater/errors.py b/dangerzone/updater/errors.py
new file mode 100644
index 0000000..1587e73
--- /dev/null
+++ b/dangerzone/updater/errors.py
@@ -0,0 +1,10 @@
+class UpdaterError(Exception):
+    pass
+
+
+class ImageNotFound(UpdaterError):
+    pass
+
+
+class RegistryError(UpdaterError):
+    pass
diff --git a/dangerzone/updater/registry.py b/dangerzone/updater/registry.py
new file mode 100644
index 0000000..fe57364
--- /dev/null
+++ b/dangerzone/updater/registry.py
@@ -0,0 +1,116 @@
+import re
+from collections import namedtuple
+from hashlib import sha256
+from typing import Dict, Optional, Tuple
+
+import requests
+
+from . 
import errors, log
+
+__all__ = [
+    "get_manifest_digest",
+    "list_tags",
+    "get_manifest",
+    "parse_image_location",
+]
+
+SIGSTORE_BUNDLE = "application/vnd.dev.sigstore.bundle.v0.3+json"
+IMAGE_INDEX_MEDIA_TYPE = "application/vnd.oci.image.index.v1+json"
+ACCEPT_MANIFESTS_HEADER = ",".join(
+    [
+        "application/vnd.docker.distribution.manifest.v1+json",
+        "application/vnd.docker.distribution.manifest.v1+prettyjws",
+        "application/vnd.docker.distribution.manifest.v2+json",
+        "application/vnd.oci.image.manifest.v1+json",
+        "application/vnd.docker.distribution.manifest.list.v2+json",
+        IMAGE_INDEX_MEDIA_TYPE,
+    ]
+)
+
+
+Image = namedtuple("Image", ["registry", "namespace", "image_name", "tag", "digest"])
+
+
+def parse_image_location(input_string: str) -> Image:
+    """Parses container image location into an Image namedtuple"""
+    pattern = (
+        r"^"
+        r"(?P<registry>[a-zA-Z0-9.-]+)/"
+        r"(?P<namespace>[a-zA-Z0-9-]+)/"
+        r"(?P<image_name>[^:@]+)"
+        r"(?::(?P<tag>[a-zA-Z0-9.-]+))?"
+        r"(?:@(?P<digest>sha256:[a-zA-Z0-9]+))?"
+        r"$"
+    )
+    match = re.match(pattern, input_string)
+    if not match:
+        raise ValueError("Malformed image location")
+    return Image(
+        registry=match.group("registry"),
+        namespace=match.group("namespace"),
+        image_name=match.group("image_name"),
+        tag=match.group("tag") or "latest",
+        digest=match.group("digest"),
+    )
+
+
+def _get_auth_header(image: Image) -> Dict[str, str]:
+    auth_url = f"https://{image.registry}/token"
+    response = requests.get(
+        auth_url,
+        params={
+            "service": f"{image.registry}",
+            "scope": f"repository:{image.namespace}/{image.image_name}:pull",
+        },
+    )
+    response.raise_for_status()
+    token = response.json()["token"]
+    return {"Authorization": f"Bearer {token}"}
+
+
+def _url(image: Image) -> str:
+    return f"https://{image.registry}/v2/{image.namespace}/{image.image_name}"
+
+
+def list_tags(image_str: str) -> list:
+    image = parse_image_location(image_str)
+    url = f"{_url(image)}/tags/list"
+    response = requests.get(url, headers=_get_auth_header(image))
+    response.raise_for_status()
+    tags = response.json().get("tags", [])
+    return tags
+
+
+def get_manifest(image_str: str) -> requests.Response:
+    """Get manifest information for a specific tag"""
+    image = parse_image_location(image_str)
+    manifest_url = f"{_url(image)}/manifests/{image.tag}"
+    headers = {
+        "Accept": ACCEPT_MANIFESTS_HEADER,
+    }
+    headers.update(_get_auth_header(image))
+
+    response = requests.get(manifest_url, headers=headers)
+    response.raise_for_status()
+    return response
+
+
+def list_manifests(image_str: str) -> list:
+    return get_manifest(image_str).json().get("manifests")
+
+
+def get_blob(image: Image, digest: str) -> requests.Response:
+    response = requests.get(
+        f"{_url(image)}/blobs/{digest}", headers=_get_auth_header(image)
+    )
+    response.raise_for_status()
+    return response
+
+
+def get_manifest_digest(
+    image_str: str, tag_manifest_content: Optional[bytes] = None
+) -> str:
+    if not tag_manifest_content:
+        tag_manifest_content = get_manifest(image_str).content
+
+    return sha256(tag_manifest_content).hexdigest()
diff --git a/dev_scripts/dangerzone-image b/dev_scripts/dangerzone-image
new file mode 100755
index 0000000..5467207
--- /dev/null
+++ b/dev_scripts/dangerzone-image
@@ -0,0 +1,13 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+import os
+import sys
+
+# Load dangerzone module and resources from the source code tree
+sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+sys.dangerzone_dev = True
+
+from dangerzone.updater import cli
+
+cli.main()
diff --git 
a/pyproject.toml b/pyproject.toml
index c54bf2d..ddc8ee5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -27,6 +27,7 @@ packaging = "*"
 [tool.poetry.scripts]
 dangerzone = 'dangerzone:main'
 dangerzone-cli = 'dangerzone:main'
+dangerzone-image = "dangerzone.updater.cli:main"
 
 # Dependencies required for packaging the code on various platforms.
 [tool.poetry.group.package.dependencies]

From a87fd4338bf9e1fdd369e2766e8b4a7ff9cafc05 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexis=20M=C3=A9taireau?=
Date: Tue, 11 Feb 2025 18:22:43 +0100
Subject: [PATCH 02/24] Download and verify cosign signatures

Signatures are stored in the OCI Manifest v2 registry, and are expected
to follow the Cosign Signature Specification [0].

The following CLI utilities are provided with `dangerzone-image`:

For checking new container images, upgrading them and downloading them:

- `upgrade` upgrades the currently installed image to the latest one
  available on the OCI registry, downloading and storing the signatures
  in the process.
- `verify-local` verifies the currently installed image against the
  downloaded signatures and public key.

To prepare and install archives on air-gapped environments:

- `prepare-archive` prepares an archive to install on another machine
- `load-archive` upgrades the local image to the one in the given archive.

Signatures are stored locally using the format provided by `cosign
download signature`, and the Rekor log index is used to ensure that the
container image requested for installation is newer than the one already
present on the system.

[0] https://github.com/sigstore/cosign/blob/main/specs/SIGNATURE_SPEC.md
---
 dangerzone/container_utils.py    |  75 ++++-
 dangerzone/errors.py             |  22 +-
 dangerzone/updater/cli.py        |  66 +++++
 dangerzone/updater/cosign.py     |  32 +++
 dangerzone/updater/errors.py     |  48 ++++
 dangerzone/updater/signatures.py | 456 +++++++++++++++++++++++++++++++
 tests/conftest.py                |   1 -
 7 files changed, 687 insertions(+), 13 deletions(-)
 create mode 100644 dangerzone/updater/cosign.py
 create mode 100644 dangerzone/updater/signatures.py

diff --git a/dangerzone/container_utils.py b/dangerzone/container_utils.py
index e7d60ff..f2d1e19 100644
--- a/dangerzone/container_utils.py
+++ b/dangerzone/container_utils.py
@@ -10,7 +10,8 @@ from . import errors
 from .settings import Settings
 from .util import get_resource_path, get_subprocess_startupinfo
 
-CONTAINER_NAME = "dangerzone.rocks/dangerzone"
+OLD_CONTAINER_NAME = "dangerzone.rocks/dangerzone"
+CONTAINER_NAME = "ghcr.io/freedomofpress/dangerzone/dangerzone"
 
 log = logging.getLogger(__name__)
 
@@ -149,12 +150,6 @@ def delete_image_tag(tag: str) -> None:
     )
 
 
-def get_expected_tag() -> str:
-    """Get the tag of the Dangerzone image tarball from the image-id.txt file."""
-    with get_resource_path("image-id.txt").open() as f:
-        return f.read().strip()
-
-
 def load_image_tarball() -> None:
     runtime = Runtime()
     log.info("Installing Dangerzone container image...")
@@ -199,3 +194,69 @@ def load_image_tarball() -> None:
                 delete_image_tag(bad_tag)
 
     log.info("Successfully installed container image")
+
+
+def tag_image_by_digest(digest: str, tag: str) -> None:
+    """Tag a container image by digest.
+    The sha256: prefix should be omitted from the digest.
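+    (e.g. pass "abc123", a hypothetical value, rather than "sha256:abc123")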
+    """
+    runtime = Runtime()
+    image_id = get_image_id_by_digest(digest)
+    cmd = [str(runtime.path), "tag", image_id, tag]
+    log.debug(" ".join(cmd))
+    subprocess.run(cmd, startupinfo=get_subprocess_startupinfo(), check=True)
+
+
+def get_image_id_by_digest(digest: str) -> str:
+    """Get an image ID from a digest.
+    The sha256: prefix should be omitted from the digest.
+    """
+    runtime = Runtime()
+    cmd = [
+        str(runtime.path),
+        "images",
+        "-f",
+        f"digest=sha256:{digest}",
+        "--format",
+        "{{.Id}}",
+    ]
+    log.debug(" ".join(cmd))
+    process = subprocess.run(
+        cmd, startupinfo=get_subprocess_startupinfo(), check=True, capture_output=True
+    )
+    # In case we have multiple lines, we only want the first one.
+    return process.stdout.decode().strip().split("\n")[0]
+
+
+def container_pull(image: str, manifest_digest: str):
+    """Pull a container image from a registry."""
+    runtime = Runtime()
+    cmd = [str(runtime.path), "pull", f"{image}@sha256:{manifest_digest}"]
+    process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
+    process.communicate()
+    if process.returncode != 0:
+        raise errors.ContainerPullException(f"Could not pull the container image: {e}")
+
+
+def get_local_image_digest(image: str) -> str:
+    """
+    Returns an image hash from a local image name
+    """
+    # Get the image hash from the "podman images" command.
+    # It's not possible to use "podman inspect" here as it
+    # returns the digest of the architecture-bound image
+    runtime = Runtime()
+    cmd = [str(runtime.path), "images", image, "--format", "{{.Digest}}"]
+    log.debug(" ".join(cmd))
+    try:
+        result = subprocess.run(cmd, capture_output=True, check=True)
+        lines = result.stdout.decode().strip().split("\n")
+        if len(lines) != 1:
+            raise errors.MultipleImagesFoundException(
+                f"Expected a single line of output, got {len(lines)} lines"
+            )
+        return lines[0].replace("sha256:", "")
+    except subprocess.CalledProcessError as e:
+        raise errors.ImageNotPresentException(
+            f"The image {image} does not exist locally"
+        )
diff --git a/dangerzone/errors.py b/dangerzone/errors.py
index c1c2849..21fe807 100644
--- a/dangerzone/errors.py
+++ b/dangerzone/errors.py
@@ -122,25 +122,37 @@ def handle_document_errors(func: F) -> F:
 
 #### Container-related errors
 
-class ImageNotPresentException(Exception):
+class ContainerException(Exception):
     pass
 
 
-class ImageInstallationException(Exception):
+class ImageNotPresentException(ContainerException):
     pass
 
 
-class NoContainerTechException(Exception):
+class MultipleImagesFoundException(ContainerException):
+    pass
+
+
+class ImageInstallationException(ContainerException):
+    pass
+
+
+class NoContainerTechException(ContainerException):
     def __init__(self, container_tech: str) -> None:
         super().__init__(f"{container_tech} is not installed")
 
 
-class NotAvailableContainerTechException(Exception):
+class NotAvailableContainerTechException(ContainerException):
     def __init__(self, container_tech: str, error: str) -> None:
         self.error = error
         self.container_tech = container_tech
         super().__init__(f"{container_tech} is not available")
 
 
-class UnsupportedContainerRuntime(Exception):
+class UnsupportedContainerRuntime(ContainerException):
+    pass
+
+
+class ContainerPullException(ContainerException):
     pass
diff --git a/dangerzone/updater/cli.py b/dangerzone/updater/cli.py
index 1c9f85b..e496aaf 100644
--- a/dangerzone/updater/cli.py
+++ b/dangerzone/updater/cli.py
@@ -22,6 +22,72 @@ def main(debug: bool) -> None:
     logging.basicConfig(level=level)
 
 
+@main.command()
+@click.argument("image", default=DEFAULT_IMAGE_NAME)
+@click.option("--pubkey", default=signatures.DEFAULT_PUBKEY_LOCATION)
+def upgrade(image: str, pubkey: str) -> None:
+    """Upgrade the image to the latest signed version."""
+    manifest_digest = registry.get_manifest_digest(image)
+    try:
+        is_upgraded = signatures.upgrade_container_image(image, manifest_digest, pubkey)
+        if is_upgraded:
+            click.echo(f"✅ The local image {image} has been upgraded")
+            click.echo(f"✅ The image has been signed with {pubkey}")
+            click.echo(f"✅ Signatures have been verified and stored locally")
+
+    except errors.ImageAlreadyUpToDate as e:
+        click.echo(f"✅ {e}")
+        raise click.Abort()
+    except Exception as e:
+        click.echo(f"❌ {e}")
+        raise click.Abort()
+
+
+@main.command()
+@click.argument("image_filename")
+@click.option("--pubkey", default=signatures.DEFAULT_PUBKEY_LOCATION)
+def load_archive(image_filename: str, pubkey: str) -> None:
+    """Upgrade the local image to the one in the archive."""
+    try:
+        loaded_image = signatures.upgrade_container_image_airgapped(
+            image_filename, pubkey
+        )
+        click.echo(
+            f"✅ Installed image {image_filename} on the system as {loaded_image}"
+        )
+    except errors.ImageAlreadyUpToDate as e:
+        click.echo(f"✅ {e}")
+        raise click.Abort()
+
+
+@main.command()
+@click.argument("image")
+@click.option("--output", default="dangerzone-airgapped.tar")
+def prepare_archive(image: str, output: str) -> None:
+    """Prepare an archive to upgrade the dangerzone image in an air-gapped environment."""
+    signatures.prepare_airgapped_archive(image, output)
+    click.echo(f"✅ Archive {output} created")
+
+
+@main.command()
+@click.argument("image", default=DEFAULT_IMAGE_NAME)
+@click.option("--pubkey", default=signatures.DEFAULT_PUBKEY_LOCATION)
+def verify_local(image: str, pubkey: str) -> None:
+    """
+    Verify the local image signature against a public key and the stored signatures.
+    """
+    # XXX remove a potential :tag
+    if signatures.verify_local_image(image, pubkey):
+        click.echo(
+            (
+                f"Verifying the local image:\n\n"
+                f"pubkey: {pubkey}\n"
+                f"image: {image}\n\n"
+                f"✅ The local image {image} has been signed with {pubkey}"
+            )
+        )
+
+
 @main.command()
 @click.argument("image")
 def list_remote_tags(image: str) -> None:
diff --git a/dangerzone/updater/cosign.py b/dangerzone/updater/cosign.py
new file mode 100644
index 0000000..9abcc84
--- /dev/null
+++ b/dangerzone/updater/cosign.py
@@ -0,0 +1,32 @@
+import subprocess
+
+from . 
import errors, log
+
+
+def ensure_installed() -> None:
+    try:
+        subprocess.run(["cosign", "version"], capture_output=True, check=True)
+    except subprocess.CalledProcessError:
+        raise errors.CosignNotInstalledError()
+
+
+def verify_local_image(oci_image_folder: str, pubkey: str) -> bool:
+    """Verify the given path against the given public key"""
+
+    ensure_installed()
+    cmd = [
+        "cosign",
+        "verify",
+        "--key",
+        pubkey,
+        "--offline",
+        "--local-image",
+        oci_image_folder,
+    ]
+    log.debug(" ".join(cmd))
+    result = subprocess.run(cmd, capture_output=True)
+    if result.returncode == 0:
+        log.info("Signature verified")
+        return True
+    log.info("Failed to verify signature: %s", result.stderr)
+    return False
diff --git a/dangerzone/updater/errors.py b/dangerzone/updater/errors.py
index 1587e73..6b75c0e 100644
--- a/dangerzone/updater/errors.py
+++ b/dangerzone/updater/errors.py
@@ -2,9 +2,57 @@ class UpdaterError(Exception):
     pass
 
 
+class ImageAlreadyUpToDate(UpdaterError):
+    pass
+
+
 class ImageNotFound(UpdaterError):
     pass
 
 
+class SignatureError(UpdaterError):
+    pass
+
+
 class RegistryError(UpdaterError):
     pass
+
+
+class AirgappedImageDownloadError(UpdaterError):
+    pass
+
+
+class NoRemoteSignatures(SignatureError):
+    pass
+
+
+class SignatureVerificationError(SignatureError):
+    pass
+
+
+class SignatureExtractionError(SignatureError):
+    pass
+
+
+class SignaturesFolderDoesNotExist(SignatureError):
+    pass
+
+
+class InvalidSignatures(SignatureError):
+    pass
+
+
+class SignatureMismatch(SignatureError):
+    pass
+
+
+class LocalSignatureNotFound(SignatureError):
+    pass
+
+
+class CosignNotInstalledError(SignatureError):
+    pass
+
+
+class InvalidLogIndex(SignatureError):
+    pass
diff --git a/dangerzone/updater/signatures.py b/dangerzone/updater/signatures.py
new file mode 100644
index 0000000..27cd55d
--- /dev/null
+++ b/dangerzone/updater/signatures.py
@@ -0,0 +1,456 @@
+import json
+import platform
+import re
+import subprocess
+import tarfile
+from base64 import b64decode, b64encode
+from functools import reduce
+from hashlib import sha256
+from io import BytesIO
+from pathlib import Path
+from tempfile import NamedTemporaryFile, TemporaryDirectory
+from typing import Dict, List, Optional, Tuple
+
+from .. import container_utils as runtime
+from .. import errors as dzerrors
+from ..util import get_resource_path
+from . import cosign, errors, log, registry
+
+try:
+    import platformdirs
+except ImportError:
+    import appdirs as platformdirs  # type: ignore[no-redef]
+
+
+def get_config_dir() -> Path:
+    return Path(platformdirs.user_config_dir("dangerzone"))
+
+
+# XXX Store this somewhere else.
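+# (SIGNATURES_PATH holds the signatures we have already verified, keyed by
+# public-key digest, and LAST_LOG_INDEX records the highest Rekor log index
+# seen so far, so we never roll back to an older image; see store_signatures().)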
+DEFAULT_PUBKEY_LOCATION = get_resource_path("freedomofpress-dangerzone-pub.key")
+SIGNATURES_PATH = get_config_dir() / "signatures"
+LAST_LOG_INDEX = SIGNATURES_PATH / "last_log_index"
+
+__all__ = [
+    "verify_signature",
+    "load_signatures",
+    "store_signatures",
+    "verify_local_image",
+]
+
+
+def signature_to_bundle(sig: Dict) -> Dict:
+    """Convert a cosign-download signature to the format expected by cosign bundle."""
+    bundle = sig["Bundle"]
+    payload = bundle["Payload"]
+    return {
+        "base64Signature": sig["Base64Signature"],
+        "Payload": sig["Payload"],
+        "cert": sig["Cert"],
+        "chain": sig["Chain"],
+        "rekorBundle": {
+            "SignedEntryTimestamp": bundle["SignedEntryTimestamp"],
+            "Payload": {
+                "body": payload["body"],
+                "integratedTime": payload["integratedTime"],
+                "logIndex": payload["logIndex"],
+                "logID": payload["logID"],
+            },
+        },
+        "RFC3161Timestamp": sig["RFC3161Timestamp"],
+    }
+
+
+def verify_signature(signature: dict, image_digest: str, pubkey: str) -> bool:
+    """Verify a signature against a given public key"""
+    # XXX - Also verfy the identity/docker-reference field against the expected value
+    # e.g. ghcr.io/freedomofpress/dangerzone/dangerzone
+
+    cosign.ensure_installed()
+    signature_bundle = signature_to_bundle(signature)
+
+    payload_bytes = b64decode(signature_bundle["Payload"])
+    payload_digest = json.loads(payload_bytes)["critical"]["image"][
+        "docker-manifest-digest"
+    ]
+    if payload_digest != f"sha256:{image_digest}":
+        raise errors.SignatureMismatch(
+            f"The signature does not match the image digest ({payload_digest}, {image_digest})"
+        )
+
+    with (
+        NamedTemporaryFile(mode="w") as signature_file,
+        NamedTemporaryFile(mode="bw") as payload_file,
+    ):
+        json.dump(signature_bundle, signature_file)
+        signature_file.flush()
+
+        payload_file.write(payload_bytes)
+        payload_file.flush()
+
+        cmd = [
+            "cosign",
+            "verify-blob",
+            "--key",
+            pubkey,
+            "--bundle",
+            signature_file.name,
+            payload_file.name,
+        ]
+        log.debug(" ".join(cmd))
+        result = subprocess.run(cmd, capture_output=True)
+        if result.returncode != 0:
+            # XXX Raise instead?
+            log.debug("Failed to verify signature: %s", result.stderr)
+            raise errors.SignatureVerificationError("Failed to verify signature")
+        if result.stderr == b"Verified OK\n":
+            log.debug("Signature verified")
+            return True
+    return False
+
+
+class Signature:
+    def __init__(self, signature: Dict):
+        self.signature = signature
+
+    @property
+    def payload(self) -> Dict:
+        return json.loads(b64decode(self.signature["Payload"]))
+
+    @property
+    def manifest_digest(self) -> str:
+        full_digest = self.payload["critical"]["image"]["docker-manifest-digest"]
+        return full_digest.replace("sha256:", "")
+
+
+def is_update_available(image: str) -> Tuple[bool, Optional[str]]:
+    remote_digest = registry.get_manifest_digest(image)
+    try:
+        local_digest = runtime.get_local_image_digest(image)
+    except dzerrors.ImageNotPresentException:
+        log.debug("No local image found")
+        return True, remote_digest
+    log.debug("Remote digest: %s", remote_digest)
+    log.debug("Local digest: %s", local_digest)
+    has_update = remote_digest != local_digest
+    if has_update:
+        return True, remote_digest
+    return False, None
+
+
+def verify_signatures(
+    signatures: List[Dict],
+    image_digest: str,
+    pubkey: str,
+) -> bool:
+    for signature in signatures:
+        if not verify_signature(signature, image_digest, pubkey):
+            raise errors.SignatureVerificationError()
+    return True
+
+
+def get_last_log_index() -> int:
+    SIGNATURES_PATH.mkdir(parents=True, exist_ok=True)
+    if not LAST_LOG_INDEX.exists():
+        return 0
+
+    with open(LAST_LOG_INDEX) as f:
+        return int(f.read())
+
+
+def get_log_index_from_signatures(signatures: List[Dict]) -> int:
+    return reduce(
+        lambda acc, sig: max(acc, sig["Bundle"]["Payload"]["logIndex"]), signatures, 0
+    )
+
+
+def write_log_index(log_index: int) -> None:
+    last_log_index_path = SIGNATURES_PATH / "last_log_index"
+
+    with open(last_log_index_path, "w") as f:
+        f.write(str(log_index))
+
+
+def _get_blob(tmpdir: str, digest: str) -> Path:
+    return Path(tmpdir) / "blobs" / "sha256" / digest.replace("sha256:", "")
+
+
+def upgrade_container_image_airgapped(container_tar: str, pubkey: str) -> str:
+    """
+    Verify the given archive against its self-contained signatures, then
+    upgrade the image and retag it to the expected tag.
+
+    Right now, the archive is extracted and reconstructed, requiring some space
+    on the filesystem.
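+
+    (The archive is expected to be a tarball of an OCI image layout --
+    index.json, oci-layout, blobs/ -- as produced by `cosign save`; see
+    prepare_airgapped_archive() below.)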
+
+    :return: The loaded image name
+    """
+
+    # XXX Use a memory buffer instead of the filesystem
+    with TemporaryDirectory() as tmpdir:
+
+        def _get_signature_filename(manifests: List[Dict]) -> Path:
+            for manifest in manifests:
+                if (
+                    manifest["annotations"].get("kind")
+                    == "dev.cosignproject.cosign/sigs"
+                ):
+                    return _get_blob(tmpdir, manifest["digest"])
+            raise errors.SignatureExtractionError()
+
+        with tarfile.open(container_tar, "r") as archive:
+            archive.extractall(tmpdir)
+
+        if not cosign.verify_local_image(tmpdir, pubkey):
+            raise errors.SignatureVerificationError()
+
+        # Remove the signatures from the archive, otherwise podman is not able to load it
+        with open(Path(tmpdir) / "index.json") as f:
+            index_json = json.load(f)
+
+        signature_filename = _get_signature_filename(index_json["manifests"])
+
+        index_json["manifests"] = [
+            manifest
+            for manifest in index_json["manifests"]
+            if manifest["annotations"].get("kind")
+            in ("dev.cosignproject.cosign/imageIndex", "dev.cosignproject.cosign/image")
+        ]
+
+        with open(signature_filename, "r") as f:
+            image_name, signatures = convert_oci_images_signatures(json.load(f), tmpdir)
+        log.info(f"Found image name: {image_name}")
+
+        # Ensure that we only upgrade if the log index is higher than the last known one
+        incoming_log_index = get_log_index_from_signatures(signatures)
+        last_log_index = get_last_log_index()
+
+        if incoming_log_index < last_log_index:
+            raise errors.InvalidLogIndex(
+                "The incoming log index is lower than the last known one"
+            )
+
+        image_digest = index_json["manifests"][0].get("digest").replace("sha256:", "")
+
+        # Write the new index.json to the temp folder
+        with open(Path(tmpdir) / "index.json", "w") as f:
+            json.dump(index_json, f)
+
+        with NamedTemporaryFile(suffix=".tar") as temporary_tar:
+            with tarfile.open(temporary_tar.name, "w") as archive:
+                # The root is the tmpdir
+                archive.add(Path(tmpdir) / "index.json", arcname="index.json")
+                archive.add(Path(tmpdir) / "oci-layout", arcname="oci-layout")
+                archive.add(Path(tmpdir) / "blobs", arcname="blobs")
+
+            runtime.load_image_tarball_from_tar(temporary_tar.name)
+            runtime.tag_image_by_digest(image_digest, image_name)
+
+    store_signatures(signatures, image_digest, pubkey)
+    return image_name
+
+
+def convert_oci_images_signatures(
+    signatures_manifest: Dict, tmpdir: str
+) -> Tuple[str, List[Dict]]:
+    def _to_cosign_signature(layer: Dict) -> Dict:
+        signature = layer["annotations"]["dev.cosignproject.cosign/signature"]
+        bundle = json.loads(layer["annotations"]["dev.sigstore.cosign/bundle"])
+        payload_body = json.loads(b64decode(bundle["Payload"]["body"]))
+
+        payload_location = _get_blob(tmpdir, layer["digest"])
+        with open(payload_location, "rb") as f:
+            payload_b64 = b64encode(f.read()).decode()
+
+        return {
+            "Base64Signature": payload_body["spec"]["signature"]["content"],
+            "Payload": payload_b64,
+            "Cert": None,
+            "Chain": None,
+            "Bundle": bundle,
+            "RFC3161Timestamp": None,
+        }
+
+    layers = signatures_manifest.get("layers", [])
+    signatures = [_to_cosign_signature(layer) for layer in layers]
+
+    if not signatures:
+        raise errors.SignatureExtractionError()
+
+    payload_location = _get_blob(tmpdir, layers[0]["digest"])
+    with open(payload_location, "r") as f:
+        payload = json.load(f)
+        image_name = payload["critical"]["identity"]["docker-reference"]
+
+    return image_name, signatures
+
+
+def get_file_digest(file: Optional[str] = None, content: Optional[bytes] = None) -> str:
+    """Get the sha256 digest of a file or content"""
+    if not file and not content:
+        raise errors.UpdaterError("No file or content provided")
+    if file:
+        with open(file, "rb") as f:
+            content = f.read()
+    if content:
+        return sha256(content).hexdigest()
+    return ""
+
+
+def load_signatures(image_digest: str, pubkey: str) -> List[Dict]:
+    """
+    Load signatures from the local filesystem
+
+    See store_signatures() for the expected format.
+    """
+    pubkey_signatures = SIGNATURES_PATH / get_file_digest(pubkey)
+    if not pubkey_signatures.exists():
+        msg = (
+            f"Cannot find a '{pubkey_signatures}' folder. "
+            "You might need to download the image signatures first."
+        )
+        raise errors.SignaturesFolderDoesNotExist(msg)
+
+    with open(pubkey_signatures / f"{image_digest}.json") as f:
+        log.debug("Loading signatures from %s", f.name)
+        return json.load(f)
+
+
+def store_signatures(signatures: list[Dict], image_digest: str, pubkey: str) -> None:
+    """
+    Store signatures locally in the SIGNATURES_PATH folder, like this:
+
+    ~/.config/dangerzone/signatures/
+    ├── <pubkey-digest>
+    │   ├── <image-digest>.json
+    │   ├── <image-digest>.json
+    └── last_log_index
+
+    The last_log_index file is used to keep track of the last log index
+    processed by the updater.
+
+    The format used in the `<image-digest>.json` file is the one of `cosign
+    download signature`, which differs from the "bundle" one used afterwards.
+
+    It can be converted to the one expected by cosign verify --bundle with
+    the `signature_to_bundle()` function.
+
+    This function must be used only if the provided signatures have been verified.
+    """
+
+    def _get_digest(sig: Dict) -> str:
+        payload = json.loads(b64decode(sig["Payload"]))
+        return payload["critical"]["image"]["docker-manifest-digest"]
+
+    # All the signatures should share the same digest.
+    digests = list(map(_get_digest, signatures))
+    if len(set(digests)) != 1:
+        raise errors.InvalidSignatures("Signatures do not share the same image digest")
+
+    if f"sha256:{image_digest}" != digests[0]:
+        raise errors.SignatureMismatch(
+            f"Signatures do not match the given image digest (sha256:{image_digest}, {digests[0]})"
+        )
+
+    pubkey_signatures = SIGNATURES_PATH / get_file_digest(pubkey)
+    pubkey_signatures.mkdir(parents=True, exist_ok=True)
+
+    with open(pubkey_signatures / f"{image_digest}.json", "w") as f:
+        log.info(
+            f"Storing signatures for {image_digest} in {pubkey_signatures}/{image_digest}.json"
+        )
+        json.dump(signatures, f)
+
+    write_log_index(get_log_index_from_signatures(signatures))
+
+
+def verify_local_image(image: str, pubkey: str) -> bool:
+    """
+    Verifies that a local image has a valid signature
+    """
+    log.info(f"Verifying local image {image} against pubkey {pubkey}")
+    try:
+        image_digest = runtime.get_local_image_digest(image)
+    except subprocess.CalledProcessError:
+        raise errors.ImageNotFound(f"The image {image} does not exist locally")
+
+    log.debug(f"Image digest: {image_digest}")
+    signatures = load_signatures(image_digest, pubkey)
+    if len(signatures) < 1:
+        raise errors.LocalSignatureNotFound("No signatures found")
+
+    for signature in signatures:
+        if not verify_signature(signature, image_digest, pubkey):
+            msg = f"Unable to verify signature for {image} with pubkey {pubkey}"
+            raise errors.SignatureVerificationError(msg)
+    return True
+
+
+def get_remote_signatures(image: str, digest: str) -> List[Dict]:
+    """Retrieve the signatures from the registry, via `cosign download`."""
+    cosign.ensure_installed()
+
+    # XXX: try/catch here
+    process = subprocess.run(
+        ["cosign", "download", "signature", f"{image}@sha256:{digest}"],
+        capture_output=True,
+        check=True,
+    )
+
+    # XXX: Check the output first.
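+    # (Note: with check=True, a non-zero cosign exit raises CalledProcessError
+    # before we get here; the parsing below assumes one JSON object per line.)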
+    # Remove the last return, split on newlines, convert from JSON
+    signatures_raw = process.stdout.decode("utf-8").strip().split("\n")
+    signatures = list(map(json.loads, signatures_raw))
+    if len(signatures) < 1:
+        raise errors.NoRemoteSignatures("No signatures found for the image")
+    return signatures
+
+
+def prepare_airgapped_archive(image_name: str, destination: str) -> None:
+    if "@sha256:" not in image_name:
+        raise errors.AirgappedImageDownloadError(
+            "The image name must include a digest, e.g. ghcr.io/freedomofpress/dangerzone/dangerzone@sha256:123456"
+        )
+
+    cosign.ensure_installed()
+    # Get the image from the registry
+
+    with TemporaryDirectory() as tmpdir:
+        msg = f"Downloading image {image_name}. \nIt might take a while."
+        log.info(msg)
+
+        process = subprocess.run(
+            ["cosign", "save", image_name, "--dir", tmpdir],
+            capture_output=True,
+            check=True,
+        )
+        if process.returncode != 0:
+            raise errors.AirgappedImageDownloadError()
+
+        with tarfile.open(destination, "w") as archive:
+            archive.add(tmpdir, arcname=".")
+
+
+def upgrade_container_image(image: str, manifest_digest: str, pubkey: str) -> str:
+    """Verify and upgrade the image to the latest, if signed."""
+    update_available, _ = is_update_available(image)
+    if not update_available:
+        raise errors.ImageAlreadyUpToDate("The image is already up to date")
+
+    signatures = get_remote_signatures(image, manifest_digest)
+    verify_signatures(signatures, manifest_digest, pubkey)
+
+    # Only upgrade if the log index is higher than the last known one
+    incoming_log_index = get_log_index_from_signatures(signatures)
+    last_log_index = get_last_log_index()
+
+    if incoming_log_index < last_log_index:
+        raise errors.InvalidLogIndex(
+            "Trying to upgrade to an image with a lower log index"
+        )
+
+    runtime.container_pull(image, manifest_digest)
+
+    # Store the signatures just now to avoid storing them unverified
+    store_signatures(signatures, manifest_digest, pubkey)
+    return manifest_digest
diff --git a/tests/conftest.py b/tests/conftest.py
index 4a80f17..b55b5ca 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -132,7 +132,6 @@ for_each_doc = pytest.mark.parametrize(
     "doc", test_docs, ids=[str(doc.name) for doc in test_docs]
 )
 
-
 # External Docs - base64 docs encoded for externally sourced documents
 # XXX to reduce the chance of accidentally opening them
 test_docs_external_dir = Path(__file__).parent.joinpath(SAMPLE_EXTERNAL_DIRECTORY)

From a9fec44837dacabe9ff9d5877f8962fef2f7511a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexis=20M=C3=A9taireau?=
Date: Wed, 26 Feb 2025 16:09:58 +0100
Subject: [PATCH 03/24] Introduce a `subprocess_run` utility function

This is done to avoid forgetting Windows-specific arguments when calling
`subprocess.run`.
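
For example (an illustrative sketch, not part of the diff below):

    # Before: easy to forget the Windows-specific startupinfo argument
    subprocess.run(cmd, check=True, startupinfo=get_subprocess_startupinfo())

    # After: the wrapper supplies it automatically
    subprocess_run(cmd, check=True)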
---
 dangerzone/container_utils.py    |  32 +--
 dangerzone/updater/cli.py        |   9 +-
 dangerzone/updater/signatures.py |  37 +--
 tests/test_signatures.py         | 385 +++++++++++++++++++++++++++++++
 4 files changed, 429 insertions(+), 34 deletions(-)
 create mode 100644 tests/test_signatures.py

diff --git a/dangerzone/container_utils.py b/dangerzone/container_utils.py
index f2d1e19..162901e 100644
--- a/dangerzone/container_utils.py
+++ b/dangerzone/container_utils.py
@@ -56,6 +56,11 @@ class Runtime(object):
         return "podman" if platform.system() == "Linux" else "docker"
 
 
+def subprocess_run(*args, **kwargs) -> subprocess.CompletedProcess:
+    """subprocess.run with the correct startupinfo for Windows."""
+    return subprocess.run(*args, startupinfo=get_subprocess_startupinfo(), **kwargs)
+
+
 def get_runtime_version(runtime: Optional[Runtime] = None) -> Tuple[int, int]:
     """Get the major/minor parts of the Docker/Podman version.
 
@@ -75,9 +80,8 @@ def get_runtime_version(runtime: Optional[Runtime] = None) -> Tuple[int, int]:
 
     cmd = [str(runtime.path), "version", "-f", query]
     try:
-        version = subprocess.run(
+        version = subprocess_run(
             cmd,
-            startupinfo=get_subprocess_startupinfo(),
             capture_output=True,
             check=True,
         ).stdout.decode()
@@ -193,8 +197,6 @@ def load_image_tarball() -> None:
                 add_image_tag(bad_tag, good_tag)
                 delete_image_tag(bad_tag)
 
-    log.info("Successfully installed container image")
-
 
 def tag_image_by_digest(digest: str, tag: str) -> None:
     """Tag a container image by digest.
@@ -204,7 +206,7 @@ def tag_image_by_digest(digest: str, tag: str) -> None:
     image_id = get_image_id_by_digest(digest)
     cmd = [str(runtime.path), "tag", image_id, tag]
     log.debug(" ".join(cmd))
-    subprocess.run(cmd, startupinfo=get_subprocess_startupinfo(), check=True)
+    subprocess_run(cmd, check=True)
 
 
 def get_image_id_by_digest(digest: str) -> str:
@@ -221,9 +223,7 @@ def get_image_id_by_digest(digest: str) -> str:
         "{{.Id}}",
     ]
     log.debug(" ".join(cmd))
-    process = subprocess.run(
-        cmd, startupinfo=get_subprocess_startupinfo(), check=True, capture_output=True
-    )
+    process = subprocess_run(cmd, check=True, capture_output=True)
     # In case we have multiple lines, we only want the first one.
     return process.stdout.decode().strip().split("\n")[0]
 
 
@@ -232,10 +232,12 @@ def container_pull(image: str, manifest_digest: str):
     """Pull a container image from a registry."""
     runtime = Runtime()
     cmd = [str(runtime.path), "pull", f"{image}@sha256:{manifest_digest}"]
-    process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
-    process.communicate()
-    if process.returncode != 0:
-        raise errors.ContainerPullException(f"Could not pull the container image: {e}")
+    try:
+        subprocess_run(cmd, check=True)
+    except subprocess.CalledProcessError as e:
+        raise errors.ContainerPullException(
+            f"Could not pull the container image: {e}"
+        ) from e
 
 
 def get_local_image_digest(image: str) -> str:
@@ -249,7 +251,11 @@ def get_local_image_digest(image: str) -> str:
     cmd = [str(runtime.path), "images", image, "--format", "{{.Digest}}"]
     log.debug(" ".join(cmd))
     try:
-        result = subprocess.run(cmd, capture_output=True, check=True)
+        result = subprocess_run(
+            cmd,
+            capture_output=True,
+            check=True,
+        )
         lines = result.stdout.decode().strip().split("\n")
         if len(lines) != 1:
             raise errors.MultipleImagesFoundException(
diff --git a/dangerzone/updater/cli.py b/dangerzone/updater/cli.py
index e496aaf..9363d51 100644
--- a/dangerzone/updater/cli.py
+++ b/dangerzone/updater/cli.py
@@ -29,11 +29,10 @@ def upgrade(image: str, pubkey: str) -> None:
     """Upgrade the image to the latest signed version."""
     manifest_digest = registry.get_manifest_digest(image)
     try:
-        is_upgraded = signatures.upgrade_container_image(image, manifest_digest, pubkey)
-        if is_upgraded:
-            click.echo(f"✅ The local image {image} has been upgraded")
-            click.echo(f"✅ The image has been signed with {pubkey}")
-            click.echo(f"✅ Signatures have been verified and stored locally")
+        signatures.upgrade_container_image(image, manifest_digest, pubkey)
+        click.echo(f"✅ The local image {image} has been upgraded")
+        click.echo(f"✅ The image has been signed with {pubkey}")
+        click.echo(f"✅ Signatures have been verified and stored locally")
 
     except errors.ImageAlreadyUpToDate as e:
         click.echo(f"✅ {e}")
diff --git a/dangerzone/updater/signatures.py b/dangerzone/updater/signatures.py
index 27cd55d..46f382a 100644
--- a/dangerzone/updater/signatures.py
+++ b/dangerzone/updater/signatures.py
@@ -22,13 +22,17 @@ except ImportError:
     import appdirs as platformdirs  # type: ignore[no-redef]
 
 
-def get_config_dir() -> Path:
-    return Path(platformdirs.user_config_dir("dangerzone"))
+def appdata_dir() -> Path:
+    return Path(platformdirs.user_data_dir("dangerzone"))
 
 
+# RELEASE: Bump this value to the log index of the latest signature
+# to ensure the software can't upgrade to container images that predate it.
+DEFAULT_LOG_INDEX = 0
+
 # XXX Store this somewhere else.
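 # (SIGNATURES_PATH holds the signatures we have already verified, keyed by
 # public-key digest, and LAST_LOG_INDEX records the highest Rekor log index
 # seen so far, so we never roll back to an older image; see store_signatures().)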
 DEFAULT_PUBKEY_LOCATION = get_resource_path("freedomofpress-dangerzone-pub.key")
-SIGNATURES_PATH = get_config_dir() / "signatures"
+SIGNATURES_PATH = appdata_dir() / "signatures"
 LAST_LOG_INDEX = SIGNATURES_PATH / "last_log_index"
 
 __all__ = [
@@ -61,9 +65,14 @@ def signature_to_bundle(sig: Dict) -> Dict:
     }
 
 
-def verify_signature(signature: dict, image_digest: str, pubkey: str) -> bool:
-    """Verify a signature against a given public key"""
-    # XXX - Also verfy the identity/docker-reference field against the expected value
+def verify_signature(signature: dict, image_digest: str, pubkey: str | Path) -> None:
+    """
+    Verifies that:
+
+    - the signature has been signed by the given public key
+    - the signature matches the given image digest
+    """
+    # XXX - Also verify the identity/docker-reference field against the expected value
     # e.g. ghcr.io/freedomofpress/dangerzone/dangerzone
 
     cosign.ensure_installed()
@@ -75,7 +84,8 @@ def verify_signature(signature: dict, image_digest: str, pubkey: str) -> bool:
     ]
     if payload_digest != f"sha256:{image_digest}":
         raise errors.SignatureMismatch(
-            f"The signature does not match the image digest ({payload_digest}, {image_digest})"
+            "The given signature does not match the expected image digest "
+            f"({payload_digest}, {image_digest})"
         )
 
     with (
@@ -99,14 +109,10 @@ def verify_signature(signature: dict, image_digest: str, pubkey: str) -> bool:
         ]
         log.debug(" ".join(cmd))
         result = subprocess.run(cmd, capture_output=True)
-        if result.returncode != 0:
-            # XXX Raise instead?
+        if result.returncode != 0 or result.stderr != b"Verified OK\n":
             log.debug("Failed to verify signature: %s", result.stderr)
             raise errors.SignatureVerificationError("Failed to verify signature")
-        if result.stderr == b"Verified OK\n":
-            log.debug("Signature verified")
-            return True
-    return False
+        log.debug("Signature verified")
 
 
 class Signature:
@@ -144,15 +150,14 @@ def verify_signatures(
     pubkey: str,
 ) -> bool:
     for signature in signatures:
-        if not verify_signature(signature, image_digest, pubkey):
-            raise errors.SignatureVerificationError()
+        verify_signature(signature, image_digest, pubkey)
     return True
 
 
 def get_last_log_index() -> int:
     SIGNATURES_PATH.mkdir(parents=True, exist_ok=True)
     if not LAST_LOG_INDEX.exists():
-        return 0
+        return DEFAULT_LOG_INDEX
 
     with open(LAST_LOG_INDEX) as f:
         return int(f.read())
diff --git a/tests/test_signatures.py b/tests/test_signatures.py
new file mode 100644
index 0000000..5f7a846
--- /dev/null
+++ b/tests/test_signatures.py
@@ -0,0 +1,385 @@
+import json
+import unittest
+from pathlib import Path
+
+import pytest
+from pytest_subprocess import FakeProcess
+
+from dangerzone import errors as dzerrors
+from dangerzone.updater import errors
+from dangerzone.updater.signatures import (
+    Signature,
+    get_last_log_index,
+    get_log_index_from_signatures,
+    get_remote_signatures,
+    is_update_available,
+    load_and_verify_signatures,
+    prepare_airgapped_archive,
+    store_signatures,
+    upgrade_container_image,
+    verify_local_image,
+    verify_signature,
+    verify_signatures,
+)
+
+ASSETS_PATH = Path(__file__).parent / "assets"
+TEST_PUBKEY_PATH = ASSETS_PATH / "test.pub.key"
+INVALID_SIGNATURES_PATH = ASSETS_PATH / "signatures" / "invalid"
+VALID_SIGNATURES_PATH = ASSETS_PATH / "signatures" / "valid"
+TEMPERED_SIGNATURES_PATH = ASSETS_PATH / "signatures" / "tempered"
+
+RANDOM_DIGEST = "aacc9b586648bbe3040f2822153b1d5ead2779af45ff750fd6f04daf4a9f64b4"
+
+
+@pytest.fixture
+def valid_signature():
+    signature_file = next(VALID_SIGNATURES_PATH.glob("**/*.json"))
+    with 
open(signature_file, "r") as signature_file:
+        signatures = json.load(signature_file)
+        return signatures.pop()
+
+
+@pytest.fixture
+def tempered_signature():
+    signature_file = next(TEMPERED_SIGNATURES_PATH.glob("**/*.json"))
+    with open(signature_file, "r") as signature_file:
+        signatures = json.load(signature_file)
+        return signatures.pop()
+
+
+@pytest.fixture
+def signature_other_digest(valid_signature):
+    signature = valid_signature.copy()
+    signature["Bundle"]["Payload"]["digest"] = "sha256:123456"
+    return signature
+
+
+def test_load_valid_signatures(mocker):
+    mocker.patch("dangerzone.updater.signatures.SIGNATURES_PATH", VALID_SIGNATURES_PATH)
+    valid_signatures = list(VALID_SIGNATURES_PATH.glob("**/*.json"))
+    assert len(valid_signatures) > 0
+    for file in valid_signatures:
+        signatures = load_and_verify_signatures(file.stem, TEST_PUBKEY_PATH)
+        assert isinstance(signatures, list)
+        assert len(signatures) > 0
+
+
+def test_load_invalid_signatures(mocker):
+    mocker.patch(
+        "dangerzone.updater.signatures.SIGNATURES_PATH", INVALID_SIGNATURES_PATH
+    )
+    invalid_signatures = list(INVALID_SIGNATURES_PATH.glob("**/*.json"))
+    assert len(invalid_signatures) > 0
+    for file in invalid_signatures:
+        with pytest.raises(errors.SignatureError):
+            load_and_verify_signatures(file.stem, TEST_PUBKEY_PATH)
+
+
+def test_load_tempered_signatures(mocker):
+    mocker.patch(
+        "dangerzone.updater.signatures.SIGNATURES_PATH", TEMPERED_SIGNATURES_PATH
+    )
+    tempered_signatures = list(TEMPERED_SIGNATURES_PATH.glob("**/*.json"))
+    assert len(tempered_signatures) > 0
+    for file in tempered_signatures:
+        with pytest.raises(errors.SignatureError):
+            load_and_verify_signatures(file.stem, TEST_PUBKEY_PATH)
+
+
+def test_get_log_index_from_signatures():
+    signatures = [{"Bundle": {"Payload": {"logIndex": 1}}}]
+    assert get_log_index_from_signatures(signatures) == 1
+
+
+def test_get_log_index_from_signatures_empty():
+    signatures = []
+    assert get_log_index_from_signatures(signatures) == 0
+
+
+def test_get_log_index_from_malformed_signatures():
+    signatures = [{"Bundle": {"Payload": {"logIndex": "foo"}}}]
+    assert get_log_index_from_signatures(signatures) == 0
+
+
+def test_get_log_index_from_missing_log_index():
+    signatures = [{"Bundle": {"Payload": {}}}]
+    assert get_log_index_from_signatures(signatures) == 0
+
+
+def test_upgrade_container_image_if_already_up_to_date(mocker):
+    mocker.patch(
+        "dangerzone.updater.signatures.is_update_available", return_value=(False, None)
+    )
+    with pytest.raises(errors.ImageAlreadyUpToDate):
+        upgrade_container_image(
+            "ghcr.io/freedomofpress/dangerzone/dangerzone", "sha256:123456", "test.pub"
+        )
+
+
+def test_upgrade_container_without_signatures(mocker):
+    mocker.patch(
+        "dangerzone.updater.signatures.is_update_available",
+        return_value=(True, "sha256:123456"),
+    )
+    mocker.patch("dangerzone.updater.signatures.get_remote_signatures", return_value=[])
+    with pytest.raises(errors.SignatureVerificationError):
+        upgrade_container_image(
+            "ghcr.io/freedomofpress/dangerzone/dangerzone",
+            "sha256:123456",
+            "test.pub",
+        )
+
+
+def test_upgrade_container_lower_log_index(mocker):
+    image_digest = "4da441235e84e93518778827a5c5745d532d7a4079886e1647924bee7ef1c14d"
+    signatures = load_and_verify_signatures(
+        image_digest,
+        TEST_PUBKEY_PATH,
+        bypass_verification=True,
+        signatures_path=VALID_SIGNATURES_PATH,
+    )
+    mocker.patch(
+        "dangerzone.updater.signatures.is_update_available",
+        return_value=(
+            True,
+            image_digest,
+        ),
+    )
+    mocker.patch(
+        
"dangerzone.updater.signatures.get_remote_signatures", + return_value=signatures, + ) + # Mock to avoid loosing time on test failures + mocker.patch("dangerzone.container_utils.container_pull") + # The log index of the incoming signatures is 168652066 + mocker.patch( + "dangerzone.updater.signatures.get_last_log_index", + return_value=168652067, + ) + + with pytest.raises(errors.InvalidLogIndex): + upgrade_container_image( + "ghcr.io/freedomofpress/dangerzone/dangerzone", + image_digest, + TEST_PUBKEY_PATH, + ) + + +def test_prepare_airgapped_archive_requires_digest(): + with pytest.raises(errors.AirgappedImageDownloadError): + prepare_airgapped_archive( + "ghcr.io/freedomofpress/dangerzone/dangerzone", "test.tar" + ) + + +def test_get_remote_signatures_error(fp: FakeProcess, mocker): + image = "ghcr.io/freedomofpress/dangerzone/dangerzone" + digest = "123456" + mocker.patch("dangerzone.updater.cosign.ensure_installed", return_value=True) + fp.register_subprocess( + ["cosign", "download", "signature", f"{image}@sha256:{digest}"], returncode=1 + ) + with pytest.raises(errors.NoRemoteSignatures): + get_remote_signatures(image, digest) + + +def test_get_remote_signatures_empty(fp: FakeProcess, mocker): + image = "ghcr.io/freedomofpress/dangerzone/dangerzone" + digest = "123456" + mocker.patch("dangerzone.updater.cosign.ensure_installed", return_value=True) + fp.register_subprocess( + ["cosign", "download", "signature", f"{image}@sha256:{digest}"], + stdout=json.dumps({}), + ) + with pytest.raises(errors.NoRemoteSignatures): + get_remote_signatures(image, digest) + + +def test_get_remote_signatures_cosign_error(mocker, fp: FakeProcess): + image = "ghcr.io/freedomofpress/dangerzone/dangerzone" + digest = "123456" + mocker.patch("dangerzone.updater.cosign.ensure_installed", return_value=True) + fp.register_subprocess( + ["cosign", "download", "signature", f"{image}@sha256:{digest}"], + returncode=1, + stderr="Error: no signatures associated", + ) + with pytest.raises(errors.NoRemoteSignatures): + get_remote_signatures(image, digest) + + +def test_store_signatures_with_different_digests( + valid_signature, signature_other_digest, mocker, tmp_path +): + """Test that store_signatures raises an error when a signature's digest doesn't match.""" + signatures = [valid_signature, signature_other_digest] + image_digest = "sha256:123456" + + # Mock the signatures path + signatures_path = tmp_path / "signatures" + signatures_path.mkdir() + mocker.patch("dangerzone.updater.signatures.SIGNATURES_PATH", signatures_path) + + # Mock get_log_index_from_signatures + mocker.patch( + "dangerzone.updater.signatures.get_log_index_from_signatures", + return_value=100, + ) + + # Mock get_last_log_index + mocker.patch( + "dangerzone.updater.signatures.get_last_log_index", + return_value=50, + ) + + # Call store_signatures + with pytest.raises(errors.SignatureMismatch): + store_signatures(signatures, image_digest, TEST_PUBKEY_PATH) + + # Verify that the signatures file was not created + assert not (signatures_path / f"{image_digest}.json").exists() + + # Verify that the log index file was not updated + assert not (signatures_path / "last_log_index").exists() + + +def test_stores_signatures_updates_last_log_index(valid_signature, mocker, tmp_path): + """Test that store_signatures updates the last log index file.""" + signatures = [valid_signature] + # Extract the digest from the signature + image_digest = Signature(valid_signature).manifest_digest + + # Mock the signatures path + signatures_path = tmp_path / "signatures" 
+    signatures_path.mkdir()
+    mocker.patch("dangerzone.updater.signatures.SIGNATURES_PATH", signatures_path)
+
+    # Create an existing last_log_index file with a lower value
+    with open(signatures_path / "last_log_index", "w") as f:
+        f.write("50")
+
+    # Mock get_log_index_from_signatures to return a higher value
+    mocker.patch(
+        "dangerzone.updater.signatures.get_log_index_from_signatures",
+        return_value=100,
+    )
+
+    # Call store_signatures
+    store_signatures(signatures, image_digest, TEST_PUBKEY_PATH)
+
+    # Verify that the log index file was updated
+    assert (signatures_path / "last_log_index").exists()
+    with open(signatures_path / "last_log_index", "r") as f:
+        assert f.read() == "100"
+
+
+def test_is_update_available_when_no_local_image(mocker):
+    """
+    Test that is_update_available returns True when no local image is
+    currently present.
+ """ + # Mock container_image_exists to return False + mocker.patch( + "dangerzone.container_utils.get_local_image_digest", + side_effect=dzerrors.ImageNotPresentException, + ) + + # Mock get_manifest_digest to return a digest + mocker.patch( + "dangerzone.updater.registry.get_manifest_digest", + return_value=RANDOM_DIGEST, + ) + + # Call is_update_available + update_available, digest = is_update_available("ghcr.io/freedomofpress/dangerzone") + + # Verify the result + assert update_available is True + assert digest == RANDOM_DIGEST + + +def test_verify_signature(valid_signature): + """Test that verify_signature raises an error when the payload digest doesn't match.""" + verify_signature( + valid_signature, + Signature(valid_signature).manifest_digest, + TEST_PUBKEY_PATH, + ) + + +def test_verify_signature_tempered(tempered_signature): + """Test that verify_signature raises an error when the payload digest doesn't match.""" + # Call verify_signature and expect an error + with pytest.raises(errors.SignatureError): + verify_signature( + tempered_signature, + Signature(tempered_signature).manifest_digest, + TEST_PUBKEY_PATH, + ) + + +def test_verify_signatures_empty_list(): + with pytest.raises(errors.SignatureVerificationError): + verify_signatures([], "1234", TEST_PUBKEY_PATH) From 27a91f9a0ec89580cda842fd0f54ba5f60fea49c Mon Sep 17 00:00:00 2001 From: Alex Pyrgiotis Date: Tue, 11 Feb 2025 19:15:49 +0100 Subject: [PATCH 04/24] Publish and attest multi-architecture container images A new `dangerzone-image attest-provenance` script is now available, making it possible to verify the attestations of an image published on the github container registry. Container images are now build nightly and uploaded to the container registry. --- dangerzone/updater/attestations.py | 90 ++++++++++++++++++++++++++++++ dangerzone/updater/cli.py | 49 ++++++++++++++++ 2 files changed, 139 insertions(+) create mode 100644 dangerzone/updater/attestations.py diff --git a/dangerzone/updater/attestations.py b/dangerzone/updater/attestations.py new file mode 100644 index 0000000..bdf1ef6 --- /dev/null +++ b/dangerzone/updater/attestations.py @@ -0,0 +1,90 @@ +import subprocess +from tempfile import NamedTemporaryFile + +from . import cosign + +# NOTE: You can grab the SLSA attestation for an image/tag pair with the following +# commands: +# +# IMAGE=ghcr.io/apyrgio/dangerzone/dangerzone +# TAG=20250129-0.8.0-149-gbf2f5ac +# DIGEST=$(crane digest ${IMAGE?}:${TAG?}) +# ATT_MANIFEST=${IMAGE?}:${DIGEST/:/-}.att +# ATT_BLOB=${IMAGE?}@$(crane manifest ${ATT_MANIFEST?} | jq -r '.layers[0].digest') +# crane blob ${ATT_BLOB?} | jq -r '.payload' | base64 -d | jq +CUE_POLICY = r""" +// The predicateType field must match this string +predicateType: "https://slsa.dev/provenance/v0.2" + +predicate: {{ + // This condition verifies that the builder is the builder we + // expect and trust. The following condition can be used + // unmodified. It verifies that the builder is the container + // workflow. + builder: {{ + id: =~"^https://github.com/slsa-framework/slsa-github-generator/.github/workflows/generator_container_slsa3.yml@refs/tags/v[0-9]+.[0-9]+.[0-9]+$" + }} + invocation: {{ + configSource: {{ + // This condition verifies the entrypoint of the workflow. + // Replace with the relative path to your workflow in your + // repository. + entryPoint: "{workflow}" + + // This condition verifies that the image was generated from + // the source repository we expect. Replace this with your + // repository. 
+      uri: =~"^git\\+https://github.com/{repository}@refs/heads/{branch}"
+      // Add a condition to check for a specific commit hash
+      digest: {{
+        sha1: "{commit}"
+      }}
+    }}
+  }}
+}}
+"""
+
+
+def verify(
+    image_name: str,
+    branch: str,
+    commit: str,
+    repository: str,
+    workflow: str,
+) -> bool:
+    """
+    Look up the image attestation to see if the image has been built
+    on GitHub runners, and from a given repository.
+    """
+    cosign.ensure_installed()
+    policy = CUE_POLICY.format(
+        repository=repository, workflow=workflow, commit=commit, branch=branch
+    )
+
+    # Put the value in files and verify with cosign
+    with (
+        NamedTemporaryFile(mode="w", suffix=".cue") as policy_f,
+    ):
+        policy_f.write(policy)
+        policy_f.flush()
+
+        # Call cosign with the temporary file paths
+        cmd = [
+            "cosign",
+            "verify-attestation",
+            "--type",
+            "slsaprovenance",
+            "--policy",
+            policy_f.name,
+            "--certificate-oidc-issuer",
+            "https://token.actions.githubusercontent.com",
+            "--certificate-identity-regexp",
+            "^https://github.com/slsa-framework/slsa-github-generator/.github/workflows/generator_container_slsa3.yml@refs/tags/v[0-9]+.[0-9]+.[0-9]+$",
+            image_name,
+        ]
+
+        result = subprocess.run(cmd, capture_output=True)
+        if result.returncode != 0:
+            error = result.stderr.decode()
+            raise Exception(f"Attestation cannot be verified. {error}")
+        return True
diff --git a/dangerzone/updater/cli.py b/dangerzone/updater/cli.py
index 9363d51..ede57d8 100644
--- a/dangerzone/updater/cli.py
+++ b/dangerzone/updater/cli.py
@@ -103,5 +103,54 @@ def get_manifest(image: str) -> None:
     click.echo(registry.get_manifest(image).content)
 
 
+@main.command()
+@click.argument("image_name")
+# XXX: Do we really want to check against this?
+@click.option(
+    "--branch",
+    default=DEFAULT_BRANCH,
+    help="The Git branch that the image was built from",
+)
+@click.option(
+    "--commit",
+    required=True,
+    help="The Git commit the image was built from",
+)
+@click.option(
+    "--repository",
+    default=DEFAULT_REPOSITORY,
+    help="The GitHub repository to check the attestation for",
+)
+@click.option(
+    "--workflow",
+    default=".github/workflows/release-container-image.yml",
+    help="The path of the GitHub actions workflow this image was created from",
+)
+def attest_provenance(
+    image_name: str,
+    branch: str,
+    commit: str,
+    repository: str,
+    workflow: str,
+) -> None:
+    """
+    Look up the image attestation to see if the image has been built
+    on GitHub runners, and from a given repository.
+    """
+    # TODO: Parse image and make sure it has a tag. Might even check for a digest.
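+    # (For reference, image refs here look like
+    # ghcr.io/freedomofpress/dangerzone/dangerzone:<tag>[@sha256:<digest>].)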
+    # parsed = registry.parse_image_location(image)
+
+    verified = attestations.verify(image_name, branch, commit, repository, workflow)
+    if verified:
+        click.echo(
+            f"🎉 Successfully verified image '{image_name}' and its associated claims:"
+        )
+        click.echo(f"- ✅ SLSA Level 3 provenance")
+        click.echo(f"- ✅ GitHub repo: {repository}")
+        click.echo(f"- ✅ GitHub actions workflow: {workflow}")
+        click.echo(f"- ✅ Git branch: {branch}")
+        click.echo(f"- ✅ Git commit: {commit}")
+
+
 if __name__ == "__main__":
     main()

From 53fbbc6cdf4463421e9c1a55d6f96b486b294543 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexis=20M=C3=A9taireau?=
Date: Tue, 11 Feb 2025 19:19:54 +0100
Subject: [PATCH 05/24] Add documentation for independent container updates

---
 .../independent-container-updates.md          | 83 +++++++++++++++++++
 1 file changed, 83 insertions(+)
 create mode 100644 docs/developer/independent-container-updates.md

diff --git a/docs/developer/independent-container-updates.md b/docs/developer/independent-container-updates.md
new file mode 100644
index 0000000..9f008a0
--- /dev/null
+++ b/docs/developer/independent-container-updates.md
@@ -0,0 +1,83 @@
+# Independent Container Updates
+
+Since version 0.9.0, Dangerzone is able to ship container images independently
+from releases of the software.
+
+One of the main benefits of doing so is to shorten the time needed to
+distribute security fixes for the containers. Since the container is where the
+actual conversion of documents happens, this is a way to keep Dangerzone users
+secure.
+
+If you are a Dangerzone user, this all happens behind the scenes, and you
+should not have to know anything about it to enjoy these "in-app" updates. If
+you are using Dangerzone in an air-gapped environment, check the sections
+below.
+
+## Checking attestations
+
+Each night, new images are built and pushed to the container registry, along
+with a provenance attestation, enabling anybody to verify that the image has
+been originally built by GitHub CI runners, from a defined source repository
+(in our case `freedomofpress/dangerzone`).
+
+To verify the attestations against our expectations, use the following command:
+```bash
+dangerzone-image attest-provenance ghcr.io/freedomofpress/dangerzone/dangerzone --repository freedomofpress/dangerzone
+```
+
+On success, it will report back:
+
+```
+🎉 Successfully verified image
+'ghcr.io/freedomofpress/dangerzone/dangerzone:<tag>@sha256:<digest>'
+and its associated claims:
+- ✅ SLSA Level 3 provenance
+- ✅ GitHub repo: freedomofpress/dangerzone
+- ✅ GitHub actions workflow: <workflow>
+- ✅ Git branch: <branch>
+- ✅ Git commit: <commit>
+```
+
+## Sign and publish the remote image
+
+Once the image has been reproduced locally, we can add a signature to the container registry,
+and update the `latest` tag to point to the proper hash.
+
+```bash
+cosign sign --sk ghcr.io/freedomofpress/dangerzone/dangerzone:${TAG}@sha256:${DIGEST}
+
+# And mark bump latest
+crane auth login ghcr.io -u USERNAME --password $(cat pat_token)
+crane tag ghcr.io/freedomofpress/dangerzone/dangerzone@sha256:${DIGEST} latest
+```
+
+## Install updates
+
+To check if a new container image has been released, and update your local
+installation with it, you can use the following command:
+
+```bash
+dangerzone-image upgrade ghcr.io/freedomofpress/dangerzone/dangerzone
+```
+
+## Verify locally
+
+You can verify that the image you have locally matches the stored signatures,
+and that these have been signed with a trusted public key:
+
+```bash
+dangerzone-image verify-local ghcr.io/freedomofpress/dangerzone/dangerzone
+```
+
+## Installing image updates to air-gapped environments
+
+Three steps are required:
+
+1. Prepare the archive
+2. Transfer the archive to the air-gapped system
+3. Install the archive on the air-gapped system
+
+This archive will contain all the needed material to validate that the new
+container image has been signed and is valid.
+
+On the machine on which you prepare the packages:
+
+```bash
+dangerzone-image prepare-archive --output dz-fa94872.tar ghcr.io/freedomofpress/dangerzone/dangerzone@sha256:<digest>
+```
+
+On the air-gapped machine, copy the file and run the following command:
+
+```bash
+dangerzone-image load-archive dz-fa94872.tar
+```

From 6359e488e31afc8ff97610829865a51e3432ff55 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexis=20M=C3=A9taireau?=
Date: Tue, 11 Feb 2025 19:23:05 +0100
Subject: [PATCH 06/24] Check for container updates rather than using
 `image-id.txt`

---
 dangerzone/isolation_provider/container.py | 57 ++++++++++------------
 1 file changed, 25 insertions(+), 32 deletions(-)

diff --git a/dangerzone/isolation_provider/container.py b/dangerzone/isolation_provider/container.py
index 520375f..3151680 100644
--- a/dangerzone/isolation_provider/container.py
+++ b/dangerzone/isolation_provider/container.py
@@ -5,7 +5,7 @@ import shlex
 import subprocess
 from typing import List, Tuple
 
-from .. import container_utils, errors
+from .. import container_utils, errors, updater
 from ..container_utils import Runtime
 from ..document import Document
 from ..util import get_resource_path, get_subprocess_startupinfo
@@ -95,23 +95,18 @@ class Container(IsolationProvider):
 
     @staticmethod
     def install() -> bool:
-        """Install the container image tarball, or verify that it's already installed.
+        """Check if an update is available and install it if necessary."""
+        # XXX Do this only if users have optted in to auto-updates
 
-        Perform the following actions:
-        1. Get the tags of any locally available images that match Dangerzone's image
-           name.
-        2. Get the expected image tag from the image-id.txt file.
-           - If this tag is present in the local images, then we can return.
-           - Else, prune the older container images and continue.
-        3. Load the image tarball and make sure it matches the expected tag.
-        """
-        old_tags = container_utils.list_image_tags()
-        expected_tag = container_utils.get_expected_tag()
-
-        if expected_tag not in old_tags:
-            # Prune older container images.
-            log.info(
-                f"Could not find a Dangerzone container image with tag '{expected_tag}'"
+        # # Load the image tarball into the container runtime.
+ update_available, image_digest = updater.is_update_available( + container_utils.CONTAINER_NAME + ) + if update_available and image_digest: + updater.upgrade_container_image( + container_utils.CONTAINER_NAME, + image_digest, + updater.DEFAULT_PUBKEY_LOCATION, ) for tag in old_tags: tag = container_utils.CONTAINER_NAME + ":" + tag @@ -119,18 +114,9 @@ class Container(IsolationProvider): else: return True - # Load the image tarball into the container runtime. - container_utils.load_image_tarball() - - # Check that the container image has the expected image tag. - # See https://github.com/freedomofpress/dangerzone/issues/988 for an example - # where this was not the case. - new_tags = container_utils.list_image_tags() - if expected_tag not in new_tags: - raise errors.ImageNotPresentException( - f"Could not find expected tag '{expected_tag}' after loading the" - " container image tarball" - ) + updater.verify_local_image( + container_utils.CONTAINER_NAME, updater.DEFAULT_PUBKEY_LOCATION + ) return True @@ -214,6 +200,15 @@ class Container(IsolationProvider): name: str, ) -> subprocess.Popen: runtime = Runtime() + + image_digest = container_utils.get_local_image_digest( + container_utils.CONTAINER_NAME + ) + updater.verify_local_image( + container_utils.CONTAINER_NAME, + updater.DEFAULT_PUBKEY_LOCATION, + image_digest, + ) security_args = self.get_runtime_security_args() debug_args = [] if self.debug: @@ -222,9 +217,7 @@ class Container(IsolationProvider): enable_stdin = ["-i"] set_name = ["--name", name] prevent_leakage_args = ["--rm"] - image_name = [ - container_utils.CONTAINER_NAME + ":" + container_utils.get_expected_tag() - ] + image_name = [container_utils.CONTAINER_NAME + "@sha256:" + image_digest] args = ( ["run"] + security_args From 238ea527e6fc9cf3f2966716507c20a3bb1cfb6d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 12 Feb 2025 18:23:12 +0100 Subject: [PATCH 07/24] Add signatures tests --- dangerzone/container_utils.py | 7 +- dangerzone/isolation_provider/container.py | 25 +- dangerzone/updater/signatures.py | 85 ++++--- tests/assets/signatures/README.md | 7 + ...d955e68ee3e07b41b9d53f4c8cc9929a68a67.json | 18 ++ ...aa9338681e64dd3e34a34873866cb051d694e.json | 18 ++ ...5745d532d7a4079886e1647924bee7ef1c14d.json | 18 ++ ...2230dc6566997f852ef5d62b0338b46796e01.json | 18 ++ ...d955e68ee3e07b41b9d53f4c8cc9929a68a67.json | 18 ++ ...aa9338681e64dd3e34a34873866cb051d694e.json | 18 ++ .../README.md | 1 + ...d955e68ee3e07b41b9d53f4c8cc9929a68a67.json | 1 + ...aa9338681e64dd3e34a34873866cb051d694e.json | 1 + ...5745d532d7a4079886e1647924bee7ef1c14d.json | 1 + ...2230dc6566997f852ef5d62b0338b46796e01.json | 1 + ...bac18522b35b2491fdf716236a0b3502a2ca7.json | 1 + tests/assets/test.pub.key | 4 + tests/conftest.py | 8 + tests/test_registry.py | 238 ++++++++++++++++++ tests/test_signatures.py | 111 ++++++-- 20 files changed, 539 insertions(+), 60 deletions(-) create mode 100644 tests/assets/signatures/README.md create mode 100644 tests/assets/signatures/invalid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/19e8eacd75879d05f6621c2ea8dd955e68ee3e07b41b9d53f4c8cc9929a68a67.json create mode 100644 tests/assets/signatures/invalid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/220b52200e3e47b1b42010667fcaa9338681e64dd3e34a34873866cb051d694e.json create mode 100644 tests/assets/signatures/invalid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/4da441235e84e93518778827a5c5745d532d7a4079886e1647924bee7ef1c14d.json 
 create mode 100644 tests/assets/signatures/invalid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/7b21dbdebffed855621dfcdeaa52230dc6566997f852ef5d62b0338b46796e01.json
 create mode 100644 tests/assets/signatures/tempered/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/19e8eacd75879d05f6621c2ea8dd955e68ee3e07b41b9d53f4c8cc9929a68a67.json
 create mode 100644 tests/assets/signatures/tempered/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/220b52200e3e47b1b42010667fcaa9338681e64dd3e34a34873866cb051d694e.json
 create mode 100644 tests/assets/signatures/tempered/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/README.md
 create mode 100644 tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/19e8eacd75879d05f6621c2ea8dd955e68ee3e07b41b9d53f4c8cc9929a68a67.json
 create mode 100644 tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/220b52200e3e47b1b42010667fcaa9338681e64dd3e34a34873866cb051d694e.json
 create mode 100644 tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/4da441235e84e93518778827a5c5745d532d7a4079886e1647924bee7ef1c14d.json
 create mode 100644 tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/7b21dbdebffed855621dfcdeaa52230dc6566997f852ef5d62b0338b46796e01.json
 create mode 100644 tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/fa948726aac29a6ac49f01ec8fbbac18522b35b2491fdf716236a0b3502a2ca7.json
 create mode 100644 tests/assets/test.pub.key
 create mode 100644 tests/test_registry.py

diff --git a/dangerzone/container_utils.py b/dangerzone/container_utils.py
index 162901e..7c8f06d 100644
--- a/dangerzone/container_utils.py
+++ b/dangerzone/container_utils.py
@@ -261,7 +261,12 @@ def get_local_image_digest(image: str) -> str:
             raise errors.MultipleImagesFoundException(
                 f"Expected a single line of output, got {len(lines)} lines"
             )
-        return lines[0].replace("sha256:", "")
+        image_digest = lines[0].replace("sha256:", "")
+        if not image_digest:
+            raise errors.ImageNotPresentException(
+                f"The image {image} does not exist locally"
+            )
+        return image_digest
     except subprocess.CalledProcessError as e:
         raise errors.ImageNotPresentException(
             f"The image {image} does not exist locally"
diff --git a/dangerzone/isolation_provider/container.py b/dangerzone/isolation_provider/container.py
index 3151680..a5bb6b7 100644
--- a/dangerzone/isolation_provider/container.py
+++ b/dangerzone/isolation_provider/container.py
@@ -96,23 +96,21 @@ class Container(IsolationProvider):
     @staticmethod
     def install() -> bool:
         """Check if an update is available and install it if necessary."""
-        # XXX Do this only if users have optted in to auto-updates
-
-        # # Load the image tarball into the container runtime.
-        update_available, image_digest = updater.is_update_available(
-            container_utils.CONTAINER_NAME
-        )
-        if update_available and image_digest:
-            updater.upgrade_container_image(
-                container_utils.CONTAINER_NAME,
-                image_digest,
-                updater.DEFAULT_PUBKEY_LOCATION,
+        # XXX Do this only if users have opted in to auto-updates
+        if False:  # Commented out for now, just as an example of how this can be implemented
+            # # Load the image tarball into the container runtime.
+ update_available, image_digest = updater.is_update_available( + container_utils.CONTAINER_NAME ) + if update_available and image_digest: + updater.upgrade_container_image( + container_utils.CONTAINER_NAME, + image_digest, + updater.DEFAULT_PUBKEY_LOCATION, + ) for tag in old_tags: tag = container_utils.CONTAINER_NAME + ":" + tag container_utils.delete_image_tag(tag) - else: - return True updater.verify_local_image( container_utils.CONTAINER_NAME, updater.DEFAULT_PUBKEY_LOCATION @@ -207,7 +205,6 @@ class Container(IsolationProvider): updater.verify_local_image( container_utils.CONTAINER_NAME, updater.DEFAULT_PUBKEY_LOCATION, - image_digest, ) security_args = self.get_runtime_security_args() debug_args = [] diff --git a/dangerzone/updater/signatures.py b/dangerzone/updater/signatures.py index 46f382a..d452967 100644 --- a/dangerzone/updater/signatures.py +++ b/dangerzone/updater/signatures.py @@ -37,7 +37,7 @@ LAST_LOG_INDEX = SIGNATURES_PATH / "last_log_index" __all__ = [ "verify_signature", - "load_signatures", + "load_and_verify_signatures", "store_signatures", "verify_offline_image_signature", ] @@ -77,11 +77,15 @@ def verify_signature(signature: dict, image_digest: str, pubkey: str | Path) -> cosign.ensure_installed() signature_bundle = signature_to_bundle(signature) - - payload_bytes = b64decode(signature_bundle["Payload"]) - payload_digest = json.loads(payload_bytes)["critical"]["image"][ - "docker-manifest-digest" - ] + try: + payload_bytes = b64decode(signature_bundle["Payload"]) + payload_digest = json.loads(payload_bytes)["critical"]["image"][ + "docker-manifest-digest" + ] + except Exception as e: + raise errors.SignatureVerificationError( + f"Unable to extract the payload digest from the signature: {e}" + ) if payload_digest != f"sha256:{image_digest}": raise errors.SignatureMismatch( "The given signature does not match the expected image digest " @@ -98,11 +102,14 @@ def verify_signature(signature: dict, image_digest: str, pubkey: str | Path) -> payload_file.write(payload_bytes) payload_file.flush() + if isinstance(pubkey, str): + pubkey = Path(pubkey) + cmd = [ "cosign", "verify-blob", "--key", - pubkey, + str(pubkey.absolute()), "--bundle", signature_file.name, payload_file.name, @@ -149,8 +156,12 @@ def verify_signatures( image_digest: str, pubkey: str, ) -> bool: + if len(signatures) < 1: + raise errors.SignatureVerificationError("No signatures found") + for signature in signatures: verify_signature(signature, image_digest, pubkey) + return True @@ -164,9 +175,14 @@ def get_last_log_index() -> int: def get_log_index_from_signatures(signatures: List[Dict]) -> int: - return reduce( - lambda acc, sig: max(acc, sig["Bundle"]["Payload"]["logIndex"]), signatures, 0 - ) + def _reducer(accumulator: int, signature: Dict) -> int: + try: + logIndex = int(signature["Bundle"]["Payload"]["logIndex"]) + except (KeyError, ValueError): + return accumulator + return max(accumulator, logIndex) + + return reduce(_reducer, signatures, 0) def write_log_index(log_index: int) -> None: @@ -302,13 +318,21 @@ def get_file_digest(file: Optional[str] = None, content: Optional[bytes] = None) return "" -def load_signatures(image_digest: str, pubkey: str) -> List[Dict]: +def load_and_verify_signatures( + image_digest: str, + pubkey: str, + bypass_verification: bool = False, + signatures_path: Optional[Path] = None, +) -> List[Dict]: """ Load signatures from the local filesystem See store_signatures() for the expected format. 
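+
+    The expected layout on disk (an assumption inferred from what
+    store_signatures() writes, rather than a separately documented format) is
+    one JSON file per image digest, under a folder named after the digest of
+    the public key:
+
+        <signatures_path>/<pubkey digest>/<image digest>.json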
""" - pubkey_signatures = SIGNATURES_PATH / get_file_digest(pubkey) + if not signatures_path: + signatures_path = SIGNATURES_PATH + + pubkey_signatures = signatures_path / get_file_digest(pubkey) if not pubkey_signatures.exists(): msg = ( f"Cannot find a '{pubkey_signatures}' folder." @@ -318,7 +342,12 @@ def load_signatures(image_digest: str, pubkey: str) -> List[Dict]: with open(pubkey_signatures / f"{image_digest}.json") as f: log.debug("Loading signatures from %s", f.name) - return json.load(f) + signatures = json.load(f) + + if not bypass_verification: + verify_signatures(signatures, image_digest, pubkey) + + return signatures def store_signatures(signatures: list[Dict], image_digest: str, pubkey: str) -> None: @@ -380,32 +409,26 @@ def verify_local_image(image: str, pubkey: str) -> bool: raise errors.ImageNotFound(f"The image {image} does not exist locally") log.debug(f"Image digest: {image_digest}") - signatures = load_signatures(image_digest, pubkey) - if len(signatures) < 1: - raise errors.LocalSignatureNotFound("No signatures found") - - for signature in signatures: - if not verify_signature(signature, image_digest, pubkey): - msg = f"Unable to verify signature for {image} with pubkey {pubkey}" - raise errors.SignatureVerificationError(msg) + load_and_verify_signatures(image_digest, pubkey) return True def get_remote_signatures(image: str, digest: str) -> List[Dict]: - """Retrieve the signatures from the registry, via `cosign download`.""" + """Retrieve the signatures from the registry, via `cosign download signatures`.""" cosign.ensure_installed() - # XXX: try/catch here - process = subprocess.run( - ["cosign", "download", "signature", f"{image}@sha256:{digest}"], - capture_output=True, - check=True, - ) + try: + process = subprocess.run( + ["cosign", "download", "signature", f"{image}@sha256:{digest}"], + capture_output=True, + check=True, + ) + except subprocess.CalledProcessError as e: + raise errors.NoRemoteSignatures(e) - # XXX: Check the output first. # Remove the last return, split on newlines, convert from JSON signatures_raw = process.stdout.decode("utf-8").strip().split("\n") - signatures = list(map(json.loads, signatures_raw)) + signatures = list(filter(bool, map(json.loads, signatures_raw))) if len(signatures) < 1: raise errors.NoRemoteSignatures("No signatures found for the image") return signatures @@ -418,8 +441,8 @@ def prepare_airgapped_archive(image_name: str, destination: str) -> None: ) cosign.ensure_installed() - # Get the image from the registry + # Get the image from the registry with TemporaryDirectory() as tmpdir: msg = f"Downloading image {image_name}. \nIt might take a while." log.info(msg) diff --git a/tests/assets/signatures/README.md b/tests/assets/signatures/README.md new file mode 100644 index 0000000..e79adbc --- /dev/null +++ b/tests/assets/signatures/README.md @@ -0,0 +1,7 @@ +This folder contains signature-folders used for the testing the signatures implementation. + +The following folders are used: + +- `valid`: this folder contains signatures which should be considered valid and generated with the key available at `tests/assets/test.pub.key` +- `invalid`: this folder contains signatures which should be considered invalid, because their format doesn't match the expected one. e.g. it uses plain text instead of base64-encoded text. +- `tempered`: This folder contain signatures which have been tempered-with. The goal is to have signatures that looks valid, but actually aren't. 
diff --git a/tests/assets/signatures/invalid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/19e8eacd75879d05f6621c2ea8dd955e68ee3e07b41b9d53f4c8cc9929a68a67.json b/tests/assets/signatures/invalid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/19e8eacd75879d05f6621c2ea8dd955e68ee3e07b41b9d53f4c8cc9929a68a67.json new file mode 100644 index 0000000..8ff0ba9 --- /dev/null +++ b/tests/assets/signatures/invalid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/19e8eacd75879d05f6621c2ea8dd955e68ee3e07b41b9d53f4c8cc9929a68a67.json @@ -0,0 +1,18 @@ +[ + { + "Base64Signature": "Invalid base64 signature", + "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1NjoxOWU4ZWFjZDc1ODc5ZDA1ZjY2MjFjMmVhOGRkOTU1ZTY4ZWUzZTA3YjQxYjlkNTNmNGM4Y2M5OTI5YTY4YTY3In0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=", + "Cert": null, + "Chain": null, + "Bundle": { + "SignedEntryTimestamp": "MEUCIC9oXH9VVP96frVOmDw704FBqMN/Bpm2RMdTm6BtSwL/AiEA6mCIjhV65fYuy4CwjsIzQHi/oW6IBwtd6oCvN2dI6HQ=", + "Payload": { + "body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiJmMjEwNDJjY2RjOGU0ZjA1ZGEzNmE5ZjU4ODg5MmFlZGRlMzYzZTQ2ZWNjZGZjM2MyNzAyMTkwZDU0YTdmZmVlIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FWUNJUUNWaTJGUFI3Mjl1aHAvY3JFdUNTOW9yQzRhMnV0OHN3dDdTUnZXYUVSTGp3SWhBSlM1dzU3MHhsQnJsM2Nhd1Y1akQ1dk85RGh1dkNrdCtzOXJLdGc2NzVKQSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", + "integratedTime": 1738752154, + "logIndex": 168898587, + "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d" + } + }, + "RFC3161Timestamp": null + } +] \ No newline at end of file diff --git a/tests/assets/signatures/invalid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/220b52200e3e47b1b42010667fcaa9338681e64dd3e34a34873866cb051d694e.json b/tests/assets/signatures/invalid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/220b52200e3e47b1b42010667fcaa9338681e64dd3e34a34873866cb051d694e.json new file mode 100644 index 0000000..34ff6e4 --- /dev/null +++ b/tests/assets/signatures/invalid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/220b52200e3e47b1b42010667fcaa9338681e64dd3e34a34873866cb051d694e.json @@ -0,0 +1,18 @@ +[ + { + "Base64Signature": "MEQCICi2AOAJbS1k3334VMSo+qxaI4f5VoNnuVExZ4tfIu7rAiAiwuKdo8rGfFMGMLSFSQvoLF3JuwFy4JtNW6kQlwH7vg==", + "Payload": "Invalid base64 payload", + "Cert": null, + "Chain": null, + "Bundle": { + "SignedEntryTimestamp": "MEUCIEvx6NtFeAag9TplqMLjVczT/tC6lpKe9SnrxbehBlxfAiEA07BE3f5JsMLsUsmHD58D6GaZr2yz+yQ66Os2ps8oKz8=", + "Payload": { + "body": 
"eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiI4YmJmNGRiNjBmMmExM2IyNjI2NTI3MzljNWM5ZTYwNjNiMDYyNjVlODU1Zjc3MTdjMTdlYWY4YzViZTQyYWUyIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FUUNJQ2kyQU9BSmJTMWszMzM0Vk1TbytxeGFJNGY1Vm9ObnVWRXhaNHRmSXU3ckFpQWl3dUtkbzhyR2ZGTUdNTFNGU1F2b0xGM0p1d0Z5NEp0Tlc2a1Fsd0g3dmc9PSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", + "integratedTime": 1738859497, + "logIndex": 169356501, + "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d" + } + }, + "RFC3161Timestamp": null + } +] \ No newline at end of file diff --git a/tests/assets/signatures/invalid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/4da441235e84e93518778827a5c5745d532d7a4079886e1647924bee7ef1c14d.json b/tests/assets/signatures/invalid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/4da441235e84e93518778827a5c5745d532d7a4079886e1647924bee7ef1c14d.json new file mode 100644 index 0000000..15e9fae --- /dev/null +++ b/tests/assets/signatures/invalid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/4da441235e84e93518778827a5c5745d532d7a4079886e1647924bee7ef1c14d.json @@ -0,0 +1,18 @@ +[ + { + "Base64Signature": "MEQCIDJxvB7lBU+VNYBD0xw/3Bi8wY7GPJ2fBP7mUFbguApoAiAIpuQT+sgatOY6yXkkA8K/sM40d5/gt7jQywWPbq5+iw==", + "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hcHlyZ2lvL2RhbmdlcnpvbmUvZGFuZ2Vyem9uZSJ9LCJpbWFnZSI6eyJkb2NrZXItbWFuaWZlc3QtZGlnZXN0Ijoic2hhMjU2OjRkYTQ0MTIzNWU4NGU5MzUxODc3ODgyN2E1YzU3NDVkNTMyZDdhNDA3OTg4NmUxNjQ3OTI0YmVlN2VmMWMxNGQifSwidHlwZSI6ImNvc2lnbiBjb250YWluZXIgaW1hZ2Ugc2lnbmF0dXJlIn0sIm9wdGlvbmFsIjpudWxsfQ==", + "Cert": null, + "Chain": null, + "Bundle": { + "SignedEntryTimestamp": "Invalid signed entry timestamp", + "Payload": { + "body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiIyMGE2ZDU1NTk4Y2U0NjU3NWZkZjViZGU3YzhhYWE2YTU2ZjZlMGRmOWNiYTY1MTJhMDAxODhjMTU1NGIzYjE3In19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FUUNJREp4dkI3bEJVK1ZOWUJEMHh3LzNCaTh3WTdHUEoyZkJQN21VRmJndUFwb0FpQUlwdVFUK3NnYXRPWTZ5WGtrQThLL3NNNDBkNS9ndDdqUXl3V1BicTUraXc9PSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", + "integratedTime": 1738688492, + "logIndex": 168652066, + "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d" + } + }, + "RFC3161Timestamp": null + } +] \ No newline at end of file diff --git a/tests/assets/signatures/invalid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/7b21dbdebffed855621dfcdeaa52230dc6566997f852ef5d62b0338b46796e01.json b/tests/assets/signatures/invalid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/7b21dbdebffed855621dfcdeaa52230dc6566997f852ef5d62b0338b46796e01.json new file mode 100644 index 0000000..9594f7f --- /dev/null +++ 
b/tests/assets/signatures/invalid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/7b21dbdebffed855621dfcdeaa52230dc6566997f852ef5d62b0338b46796e01.json @@ -0,0 +1,18 @@ +[ + { + "Base64Signature": "MEUCIQC2WlJH+B8VuX1c6i4sDwEGEZc53hXUD6/ds9TMJ3HrfwIgCxSnrNYRD2c8XENqfqc+Ik1gx0DK9kPNsn/Lt8V/dCo=", + "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1Njo3YjIxZGJkZWJmZmVkODU1NjIxZGZjZGVhYTUyMjMwZGM2NTY2OTk3Zjg1MmVmNWQ2MmIwMzM4YjQ2Nzk2ZTAxIn0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=", + "Cert": null, + "Chain": null, + "Bundle": { + "SignedEntryTimestamp": "MEYCIQDn04gOHqiZcwUO+NVV9+29+abu6O/k1ve9zatJ3gVu9QIhAJL3E+mqVPdMPfMSdhHt2XDQsYzfRDDJNJEABQlbV3Jg", + "Payload": { + "body": "Invalid bundle payload body", + "integratedTime": 1738862352, + "logIndex": 169369149, + "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d" + } + }, + "RFC3161Timestamp": null + } +] \ No newline at end of file diff --git a/tests/assets/signatures/tempered/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/19e8eacd75879d05f6621c2ea8dd955e68ee3e07b41b9d53f4c8cc9929a68a67.json b/tests/assets/signatures/tempered/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/19e8eacd75879d05f6621c2ea8dd955e68ee3e07b41b9d53f4c8cc9929a68a67.json new file mode 100644 index 0000000..54a49bf --- /dev/null +++ b/tests/assets/signatures/tempered/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/19e8eacd75879d05f6621c2ea8dd955e68ee3e07b41b9d53f4c8cc9929a68a67.json @@ -0,0 +1,18 @@ +[ + { + "Base64Signature": "MAIhAJWLYU9Hvb26Gn9ysS4JL2isLhra63yzC3tJG9ZoREuPAiEAlLnDnvTGUGuXdxrBXmMPm870OG68KS36z2sq2DrvkkAK", + "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1NjoxOWU4ZWFjZDc1ODc5ZDA1ZjY2MjFjMmVhOGRkOTU1ZTY4ZWUzZTA3YjQxYjlkNTNmNGM4Y2M5OTI5YTY4YTY3In0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=", + "Cert": null, + "Chain": null, + "Bundle": { + "SignedEntryTimestamp": "MEUCIC9oXH9VVP96frVOmDw704FBqMN/Bpm2RMdTm6BtSwL/AiEA6mCIjhV65fYuy4CwjsIzQHi/oW6IBwtd6oCvN2dI6HQ=", + "Payload": { + "body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiJmMjEwNDJjY2RjOGU0ZjA1ZGEzNmE5ZjU4ODg5MmFlZGRlMzYzZTQ2ZWNjZGZjM2MyNzAyMTkwZDU0YTdmZmVlIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FWUNJUUNWaTJGUFI3Mjl1aHAvY3JFdUNTOW9yQzRhMnV0OHN3dDdTUnZXYUVSTGp3SWhBSlM1dzU3MHhsQnJsM2Nhd1Y1akQ1dk85RGh1dkNrdCtzOXJLdGc2NzVKQSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", + "integratedTime": 1738752154, + "logIndex": 168898587, + "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d" + } + }, + "RFC3161Timestamp": null + } +] \ No newline at end of file diff --git a/tests/assets/signatures/tempered/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/220b52200e3e47b1b42010667fcaa9338681e64dd3e34a34873866cb051d694e.json 
b/tests/assets/signatures/tempered/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/220b52200e3e47b1b42010667fcaa9338681e64dd3e34a34873866cb051d694e.json new file mode 100644 index 0000000..8bb1af4 --- /dev/null +++ b/tests/assets/signatures/tempered/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/220b52200e3e47b1b42010667fcaa9338681e64dd3e34a34873866cb051d694e.json @@ -0,0 +1,18 @@ +[ + { + "Base64Signature": "MEQCICi2AOAJbS1k3334VMSo+qxaI4f5VoNnuVExZ4tfIu7rAiAiwuKdo8rGfFMGMLSFSQvoLF3JuwFy4JtNW6kQlwH7vg==", + "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9oNHh4MHIvZGFuZ2Vyem9uZS9kYW5nZXJ6b25lIn0sImltYWdlIjp7ImRvY2tlci1tYW5pZmVzdC1kaWdlc3QiOiJzaGEyNTY6MjIwYjUyMjAwZTNlNDdiMWI0MjAxMDY2N2ZjYWE5MzM4NjgxZTY0ZGQzZTM0YTM0ODczODY2Y2IwNTFkNjk0ZSJ9LCJ0eXBlIjoiY29zaWduIGNvbnRhaW5lciBpbWFnZSBzaWduYXR1cmUifSwib3B0aW9uYWwiOm51bGx9Cg==", + "Cert": null, + "Chain": null, + "Bundle": { + "SignedEntryTimestamp": "MEUCIEvx6NtFeAag9TplqMLjVczT/tC6lpKe9SnrxbehBlxfAiEA07BE3f5JsMLsUsmHD58D6GaZr2yz+yQ66Os2ps8oKz8=", + "Payload": { + "body": "eyJhcGlWZXJzaW9uIjoiNi42LjYiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiI4YmJmNGRiNjBmMmExM2IyNjI2NTI3MzljNWM5ZTYwNjNiMDYyNjVlODU1Zjc3MTdjMTdlYWY4YzViZTQyYWUyIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FUUNJQ2kyQU9BSmJTMWszMzM0Vk1TbytxeGFJNGY1Vm9ObnVWRXhaNHRmSXU3ckFpQWl3dUtkbzhyR2ZGTUdNTFNGU1F2b0xGM0p1d0Z5NEp0Tlc2a1Fsd0g3dmc9PSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0K", + "integratedTime": 1738859497, + "logIndex": 169356501, + "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d" + } + }, + "RFC3161Timestamp": null + } +] \ No newline at end of file diff --git a/tests/assets/signatures/tempered/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/README.md b/tests/assets/signatures/tempered/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/README.md new file mode 100644 index 0000000..16819a4 --- /dev/null +++ b/tests/assets/signatures/tempered/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/README.md @@ -0,0 +1 @@ +This folder contain signatures which have been tempered-with. The goal is to have signatures that looks valid, but actually aren't. 
diff --git a/tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/19e8eacd75879d05f6621c2ea8dd955e68ee3e07b41b9d53f4c8cc9929a68a67.json b/tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/19e8eacd75879d05f6621c2ea8dd955e68ee3e07b41b9d53f4c8cc9929a68a67.json new file mode 100644 index 0000000..01db986 --- /dev/null +++ b/tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/19e8eacd75879d05f6621c2ea8dd955e68ee3e07b41b9d53f4c8cc9929a68a67.json @@ -0,0 +1 @@ +[{"Base64Signature": "MEYCIQCVi2FPR729uhp/crEuCS9orC4a2ut8swt7SRvWaERLjwIhAJS5w570xlBrl3cawV5jD5vO9DhuvCkt+s9rKtg675JA", "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1NjoxOWU4ZWFjZDc1ODc5ZDA1ZjY2MjFjMmVhOGRkOTU1ZTY4ZWUzZTA3YjQxYjlkNTNmNGM4Y2M5OTI5YTY4YTY3In0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=", "Cert": null, "Chain": null, "Bundle": {"SignedEntryTimestamp": "MEUCIC9oXH9VVP96frVOmDw704FBqMN/Bpm2RMdTm6BtSwL/AiEA6mCIjhV65fYuy4CwjsIzQHi/oW6IBwtd6oCvN2dI6HQ=", "Payload": {"body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiJmMjEwNDJjY2RjOGU0ZjA1ZGEzNmE5ZjU4ODg5MmFlZGRlMzYzZTQ2ZWNjZGZjM2MyNzAyMTkwZDU0YTdmZmVlIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FWUNJUUNWaTJGUFI3Mjl1aHAvY3JFdUNTOW9yQzRhMnV0OHN3dDdTUnZXYUVSTGp3SWhBSlM1dzU3MHhsQnJsM2Nhd1Y1akQ1dk85RGh1dkNrdCtzOXJLdGc2NzVKQSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", "integratedTime": 1738752154, "logIndex": 168898587, "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"}}, "RFC3161Timestamp": null}] \ No newline at end of file diff --git a/tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/220b52200e3e47b1b42010667fcaa9338681e64dd3e34a34873866cb051d694e.json b/tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/220b52200e3e47b1b42010667fcaa9338681e64dd3e34a34873866cb051d694e.json new file mode 100644 index 0000000..8827c9c --- /dev/null +++ b/tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/220b52200e3e47b1b42010667fcaa9338681e64dd3e34a34873866cb051d694e.json @@ -0,0 +1 @@ +[{"Base64Signature": "MEQCICi2AOAJbS1k3334VMSo+qxaI4f5VoNnuVExZ4tfIu7rAiAiwuKdo8rGfFMGMLSFSQvoLF3JuwFy4JtNW6kQlwH7vg==", "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1NjoyMjBiNTIyMDBlM2U0N2IxYjQyMDEwNjY3ZmNhYTkzMzg2ODFlNjRkZDNlMzRhMzQ4NzM4NjZjYjA1MWQ2OTRlIn0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=", "Cert": null, "Chain": null, "Bundle": {"SignedEntryTimestamp": "MEUCIEvx6NtFeAag9TplqMLjVczT/tC6lpKe9SnrxbehBlxfAiEA07BE3f5JsMLsUsmHD58D6GaZr2yz+yQ66Os2ps8oKz8=", "Payload": {"body": 
"eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiI4YmJmNGRiNjBmMmExM2IyNjI2NTI3MzljNWM5ZTYwNjNiMDYyNjVlODU1Zjc3MTdjMTdlYWY4YzViZTQyYWUyIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FUUNJQ2kyQU9BSmJTMWszMzM0Vk1TbytxeGFJNGY1Vm9ObnVWRXhaNHRmSXU3ckFpQWl3dUtkbzhyR2ZGTUdNTFNGU1F2b0xGM0p1d0Z5NEp0Tlc2a1Fsd0g3dmc9PSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", "integratedTime": 1738859497, "logIndex": 169356501, "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"}}, "RFC3161Timestamp": null}] \ No newline at end of file diff --git a/tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/4da441235e84e93518778827a5c5745d532d7a4079886e1647924bee7ef1c14d.json b/tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/4da441235e84e93518778827a5c5745d532d7a4079886e1647924bee7ef1c14d.json new file mode 100644 index 0000000..fd13e9c --- /dev/null +++ b/tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/4da441235e84e93518778827a5c5745d532d7a4079886e1647924bee7ef1c14d.json @@ -0,0 +1 @@ +[{"Base64Signature": "MEQCIDJxvB7lBU+VNYBD0xw/3Bi8wY7GPJ2fBP7mUFbguApoAiAIpuQT+sgatOY6yXkkA8K/sM40d5/gt7jQywWPbq5+iw==", "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hcHlyZ2lvL2RhbmdlcnpvbmUvZGFuZ2Vyem9uZSJ9LCJpbWFnZSI6eyJkb2NrZXItbWFuaWZlc3QtZGlnZXN0Ijoic2hhMjU2OjRkYTQ0MTIzNWU4NGU5MzUxODc3ODgyN2E1YzU3NDVkNTMyZDdhNDA3OTg4NmUxNjQ3OTI0YmVlN2VmMWMxNGQifSwidHlwZSI6ImNvc2lnbiBjb250YWluZXIgaW1hZ2Ugc2lnbmF0dXJlIn0sIm9wdGlvbmFsIjpudWxsfQ==", "Cert": null, "Chain": null, "Bundle": {"SignedEntryTimestamp": "MEYCIQDuuuHoyZ2i4HKxik4Ju/MWkELwc1w5SfzcpCV7G+vZHAIhAO25R/+lIfQ/kMfC4PfeoWDwLpvnH9cq6dVSzl12i1su", "Payload": {"body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiIyMGE2ZDU1NTk4Y2U0NjU3NWZkZjViZGU3YzhhYWE2YTU2ZjZlMGRmOWNiYTY1MTJhMDAxODhjMTU1NGIzYjE3In19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FUUNJREp4dkI3bEJVK1ZOWUJEMHh3LzNCaTh3WTdHUEoyZkJQN21VRmJndUFwb0FpQUlwdVFUK3NnYXRPWTZ5WGtrQThLL3NNNDBkNS9ndDdqUXl3V1BicTUraXc9PSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", "integratedTime": 1738688492, "logIndex": 168652066, "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"}}, "RFC3161Timestamp": null}] \ No newline at end of file diff --git a/tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/7b21dbdebffed855621dfcdeaa52230dc6566997f852ef5d62b0338b46796e01.json b/tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/7b21dbdebffed855621dfcdeaa52230dc6566997f852ef5d62b0338b46796e01.json new file mode 100644 index 0000000..e857c4b --- /dev/null +++ 
b/tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/7b21dbdebffed855621dfcdeaa52230dc6566997f852ef5d62b0338b46796e01.json @@ -0,0 +1 @@ +[{"Base64Signature": "MEUCIQC2WlJH+B8VuX1c6i4sDwEGEZc53hXUD6/ds9TMJ3HrfwIgCxSnrNYRD2c8XENqfqc+Ik1gx0DK9kPNsn/Lt8V/dCo=", "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1Njo3YjIxZGJkZWJmZmVkODU1NjIxZGZjZGVhYTUyMjMwZGM2NTY2OTk3Zjg1MmVmNWQ2MmIwMzM4YjQ2Nzk2ZTAxIn0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=", "Cert": null, "Chain": null, "Bundle": {"SignedEntryTimestamp": "MEYCIQDn04gOHqiZcwUO+NVV9+29+abu6O/k1ve9zatJ3gVu9QIhAJL3E+mqVPdMPfMSdhHt2XDQsYzfRDDJNJEABQlbV3Jg", "Payload": {"body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiIzZWQwNWJlYTc2ZWFmMzBmYWM1NzBlNzhlODBlZmQxNDNiZWQxNzFjM2VjMDY5MWI2MDU3YjdhMDAzNGEyMzhlIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FVUNJUUMyV2xKSCtCOFZ1WDFjNmk0c0R3RUdFWmM1M2hYVUQ2L2RzOVRNSjNIcmZ3SWdDeFNuck5ZUkQyYzhYRU5xZnFjK0lrMWd4MERLOWtQTnNuL0x0OFYvZENvPSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", "integratedTime": 1738862352, "logIndex": 169369149, "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"}}, "RFC3161Timestamp": null}] \ No newline at end of file diff --git a/tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/fa948726aac29a6ac49f01ec8fbbac18522b35b2491fdf716236a0b3502a2ca7.json b/tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/fa948726aac29a6ac49f01ec8fbbac18522b35b2491fdf716236a0b3502a2ca7.json new file mode 100644 index 0000000..660dbbf --- /dev/null +++ b/tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/fa948726aac29a6ac49f01ec8fbbac18522b35b2491fdf716236a0b3502a2ca7.json @@ -0,0 +1 @@ +[{"Base64Signature": "MEQCIHqXEMuAmt1pFCsHC71+ejlG5kjKrf1+AQW202OY3vhsAiA0BoDAVgAk9K7SgIRBpIV6u0veyB1iypzV0DteNh3IoQ==", "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1NjpmYTk0ODcyNmFhYzI5YTZhYzQ5ZjAxZWM4ZmJiYWMxODUyMmIzNWIyNDkxZmRmNzE2MjM2YTBiMzUwMmEyY2E3In0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=", "Cert": null, "Chain": null, "Bundle": {"SignedEntryTimestamp": "MEUCIQCrZ+2SSYdpIOEbyUXXaBxeqT8RTujpqdXipls9hmNvDgIgdWV84PiCY2cI49QjHjun7lj25/znGMDiwjCuPjIPA6Q=", "Payload": {"body": 
"eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiI5ZjcwM2I4NTM4MjM4N2U2OTgwNzYxNDg1YzU0NGIzNmJmMThmNTA5ODQwMTMxYzRmOTJhMjE4OTI3MTJmNDJmIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FUUNJSHFYRU11QW10MXBGQ3NIQzcxK2VqbEc1a2pLcmYxK0FRVzIwMk9ZM3Zoc0FpQTBCb0RBVmdBazlLN1NnSVJCcElWNnUwdmV5QjFpeXB6VjBEdGVOaDNJb1E9PSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", "integratedTime": 1737478056, "logIndex": 164177381, "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"}}, "RFC3161Timestamp": null}, {"Base64Signature": "MEYCIQDg8MeymBLOn+Khue0yK1yQy4Fu/+GXmyC/xezXO/p1JgIhAN6QLojKzkZGxyYirbqRbZCVcIM4YN3Y18FXwpW4RuUy", "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1NjpmYTk0ODcyNmFhYzI5YTZhYzQ5ZjAxZWM4ZmJiYWMxODUyMmIzNWIyNDkxZmRmNzE2MjM2YTBiMzUwMmEyY2E3In0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=", "Cert": null, "Chain": null, "Bundle": {"SignedEntryTimestamp": "MEUCIQCQLlrH2xo/bA6r386vOwA0OjUe0TqcxROT/Wo220jvGgIgPgRlKnQxWoXlD/Owf1Ogk5XlfXAt2f416LDbk4AoEvk=", "Payload": {"body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiI5ZjcwM2I4NTM4MjM4N2U2OTgwNzYxNDg1YzU0NGIzNmJmMThmNTA5ODQwMTMxYzRmOTJhMjE4OTI3MTJmNDJmIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FWUNJUURnOE1leW1CTE9uK0todWUweUsxeVF5NEZ1LytHWG15Qy94ZXpYTy9wMUpnSWhBTjZRTG9qS3prWkd4eVlpcmJxUmJaQ1ZjSU00WU4zWTE4Rlh3cFc0UnVVeSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", "integratedTime": 1737557525, "logIndex": 164445483, "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"}}, "RFC3161Timestamp": null}, {"Base64Signature": "MEQCIEhUVYVW6EdovGDSSZt1Ffc86OfzEKAas94M4eFK7hoFAiA4+6219LktmgJSKuc2ObsnL5QjHyNLk58BwY0s8gBHbQ==", "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1NjpmYTk0ODcyNmFhYzI5YTZhYzQ5ZjAxZWM4ZmJiYWMxODUyMmIzNWIyNDkxZmRmNzE2MjM2YTBiMzUwMmEyY2E3In0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=", "Cert": null, "Chain": null, "Bundle": {"SignedEntryTimestamp": "MEQCIDRUTMwL+/eW79ARRLE8h/ByCrvo0rOn3vUYQg1E6KIBAiBi/bzoqcL2Ik27KpwfFosww4l7yI+9IqwCvUlkQgEB7g==", "Payload": {"body": 
"eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiI5ZjcwM2I4NTM4MjM4N2U2OTgwNzYxNDg1YzU0NGIzNmJmMThmNTA5ODQwMTMxYzRmOTJhMjE4OTI3MTJmNDJmIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FUUNJRWhVVllWVzZFZG92R0RTU1p0MUZmYzg2T2Z6RUtBYXM5NE00ZUZLN2hvRkFpQTQrNjIxOUxrdG1nSlNLdWMyT2Jzbkw1UWpIeU5MazU4QndZMHM4Z0JIYlE9PSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", "integratedTime": 1737567664, "logIndex": 164484602, "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"}}, "RFC3161Timestamp": null}] \ No newline at end of file diff --git a/tests/assets/test.pub.key b/tests/assets/test.pub.key new file mode 100644 index 0000000..a36dd82 --- /dev/null +++ b/tests/assets/test.pub.key @@ -0,0 +1,4 @@ +-----BEGIN PUBLIC KEY----- +MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEoE0CXLGff79fR8KyPnSvOY74UBkt +2sLi+aVFUzS1Qwt4wosxHhcDN2B6QSsLlvgsH82q6qcA6PL2SdS/p4jWGA== +-----END PUBLIC KEY----- diff --git a/tests/conftest.py b/tests/conftest.py index b55b5ca..64f1a44 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -13,6 +13,13 @@ from dangerzone.gui import Application sys.dangerzone_dev = True # type: ignore[attr-defined] +ASSETS_PATH = Path(__file__).parent / "assets" +TEST_PUBKEY_PATH = ASSETS_PATH / "test.pub.key" +INVALID_SIGNATURES_PATH = ASSETS_PATH / "signatures" / "invalid" +VALID_SIGNATURES_PATH = ASSETS_PATH / "signatures" / "valid" +TEMPERED_SIGNATURES_PATH = ASSETS_PATH / "signatures" / "tempered" + + # Use this fixture to make `pytest-qt` invoke our custom QApplication. 
# See https://pytest-qt.readthedocs.io/en/latest/qapplication.html#testing-custom-qapplications @pytest.fixture(scope="session") @@ -132,6 +139,7 @@ for_each_doc = pytest.mark.parametrize( "doc", test_docs, ids=[str(doc.name) for doc in test_docs] ) + # External Docs - base64 docs encoded for externally sourced documents # XXX to reduce the chance of accidentally opening them test_docs_external_dir = Path(__file__).parent.joinpath(SAMPLE_EXTERNAL_DIRECTORY) diff --git a/tests/test_registry.py b/tests/test_registry.py new file mode 100644 index 0000000..efbf576 --- /dev/null +++ b/tests/test_registry.py @@ -0,0 +1,238 @@ +import hashlib + +import pytest +import requests +from pytest_mock import MockerFixture + +from dangerzone.updater.registry import ( + Image, + _get_auth_header, + _url, + get_manifest, + get_manifest_digest, + list_tags, + parse_image_location, +) + + +def test_parse_image_location_no_tag(): + """Test that parse_image_location correctly handles an image location without a tag.""" + image_str = "ghcr.io/freedomofpress/dangerzone" + image = parse_image_location(image_str) + + assert isinstance(image, Image) + assert image.registry == "ghcr.io" + assert image.namespace == "freedomofpress" + assert image.image_name == "dangerzone" + assert image.tag == "latest" # Default tag should be "latest" + assert image.digest is None + + +def test_parse_image_location_with_tag(): + """Test that parse_image_location correctly handles an image location with a tag.""" + image_str = "ghcr.io/freedomofpress/dangerzone:v0.4.2" + image = parse_image_location(image_str) + + assert isinstance(image, Image) + assert image.registry == "ghcr.io" + assert image.namespace == "freedomofpress" + assert image.image_name == "dangerzone" + assert image.tag == "v0.4.2" + + +def test_parse_image_location_tag_plus_digest(): + """Test that parse_image_location handles an image location with a tag that includes a digest.""" + image_str = ( + "ghcr.io/freedomofpress/dangerzone" + ":20250205-0.8.0-148-ge67fbc1" + "@sha256:19e8eacd75879d05f6621c2ea8dd955e68ee3e07b41b9d53f4c8cc9929a68a67" + ) + + image = parse_image_location(image_str) + + assert isinstance(image, Image) + assert image.registry == "ghcr.io" + assert image.namespace == "freedomofpress" + assert image.image_name == "dangerzone" + assert image.tag == "20250205-0.8.0-148-ge67fbc1" + assert ( + image.digest + == "sha256:19e8eacd75879d05f6621c2ea8dd955e68ee3e07b41b9d53f4c8cc9929a68a67" + ) + + +def test_parse_invalid_image_location(): + """Test that parse_image_location raises an error for invalid image locations.""" + invalid_image_locations = [ + "ghcr.io/dangerzone", # Missing namespace + "ghcr.io/freedomofpress/dangerzone:", # Empty tag + "freedomofpress/dangerzone", # Missing registry + "ghcr.io:freedomofpress/dangerzone", # Invalid format + "", # Empty string + ] + + for invalid_image in invalid_image_locations: + with pytest.raises(ValueError, match="Malformed image location"): + parse_image_location(invalid_image) + + +def test_list_tags(mocker: MockerFixture): + """Test that list_tags correctly retrieves tags from the registry.""" + # Mock the authentication response + image_str = "ghcr.io/freedomofpress/dangerzone" + + # Mock requests.get to return appropriate values for both calls + mock_response_auth = mocker.Mock() + mock_response_auth.json.return_value = {"token": "dummy_token"} + mock_response_auth.raise_for_status.return_value = None + + mock_response_tags = mocker.Mock() + mock_response_tags.json.return_value = { + "tags": ["v0.4.0", 
"v0.4.1", "v0.4.2", "latest"] + } + mock_response_tags.raise_for_status.return_value = None + + # Setup the mock to return different responses for each URL + def mock_get(url, **kwargs): + if "token" in url: + return mock_response_auth + else: + return mock_response_tags + + mocker.patch("requests.get", side_effect=mock_get) + + # Call the function + tags = list_tags(image_str) + + # Verify the result + assert tags == ["v0.4.0", "v0.4.1", "v0.4.2", "latest"] + + +def test_list_tags_auth_error(mocker: MockerFixture): + """Test that list_tags handles authentication errors correctly.""" + image_str = "ghcr.io/freedomofpress/dangerzone" + + # Mock requests.get to raise an HTTPError + mock_response = mocker.Mock() + mock_response.raise_for_status.side_effect = requests.exceptions.HTTPError( + "401 Client Error: Unauthorized" + ) + + mocker.patch("requests.get", return_value=mock_response) + + # Call the function and expect an error + with pytest.raises(requests.exceptions.HTTPError): + list_tags(image_str) + + +def test_list_tags_registry_error(mocker: MockerFixture): + """Test that list_tags handles registry errors correctly.""" + image_str = "ghcr.io/freedomofpress/dangerzone" + + # Mock requests.get to return success for auth but error for tags + mock_response_auth = mocker.Mock() + mock_response_auth.json.return_value = {"token": "dummy_token"} + mock_response_auth.raise_for_status.return_value = None + + mock_response_tags = mocker.Mock() + mock_response_tags.raise_for_status.side_effect = requests.exceptions.HTTPError( + "404 Client Error: Not Found" + ) + + # Setup the mock to return different responses for each URL + def mock_get(url, **kwargs): + if "token" in url: + return mock_response_auth + else: + return mock_response_tags + + mocker.patch("requests.get", side_effect=mock_get) + + # Call the function and expect an error + with pytest.raises(requests.exceptions.HTTPError): + list_tags(image_str) + + +def test_get_manifest(mocker: MockerFixture): + """Test that get_manifest correctly retrieves manifests from the registry.""" + image_str = "ghcr.io/freedomofpress/dangerzone:v0.4.2" + + # Mock the responses + manifest_content = { + "schemaVersion": 2, + "mediaType": "application/vnd.docker.distribution.manifest.v2+json", + "config": { + "mediaType": "application/vnd.docker.container.image.v1+json", + "size": 1234, + "digest": "sha256:abc123def456", + }, + "layers": [ + { + "mediaType": "application/vnd.docker.image.rootfs.diff.tar.gzip", + "size": 12345, + "digest": "sha256:layer1", + } + ], + } + + mock_response_auth = mocker.Mock() + mock_response_auth.json.return_value = {"token": "dummy_token"} + mock_response_auth.raise_for_status.return_value = None + + mock_response_manifest = mocker.Mock() + mock_response_manifest.json.return_value = manifest_content + mock_response_manifest.status_code = 200 + mock_response_manifest.raise_for_status.return_value = None + + # Setup the mock to return different responses for each URL + def mock_get(url, **kwargs): + if "token" in url: + return mock_response_auth + else: + return mock_response_manifest + + mocker.patch("requests.get", side_effect=mock_get) + + # Call the function + response = get_manifest(image_str) + + # Verify the result + assert response.status_code == 200 + assert response.json() == manifest_content + + +def test_get_manifest_digest(): + """Test that get_manifest_digest correctly calculates the manifest digest.""" + # Create a sample manifest content + manifest_content = 
b'{"schemaVersion":2,"mediaType":"application/vnd.docker.distribution.manifest.v2+json"}'
+
+    # Calculate the expected digest manually (hashlib is imported at the top of
+    # this module)
+    expected_digest = hashlib.sha256(manifest_content).hexdigest()
+
+    # Call the function with the content directly
+    digest = get_manifest_digest("unused_image_str", manifest_content)
+
+    # Verify the result
+    assert digest == expected_digest
+
+
+def test_get_manifest_digest_from_registry(mocker: MockerFixture):
+    """Test that get_manifest_digest correctly retrieves and calculates digests from the registry."""
+    image_str = "ghcr.io/freedomofpress/dangerzone:v0.4.2"
+
+    # Sample manifest content
+    manifest_content = b'{"schemaVersion":2,"mediaType":"application/vnd.docker.distribution.manifest.v2+json"}'
+    expected_digest = hashlib.sha256(manifest_content).hexdigest()
+
+    # Mock get_manifest
+    mock_response = mocker.Mock()
+    mock_response.content = manifest_content
+    mocker.patch("dangerzone.updater.registry.get_manifest", return_value=mock_response)
+
+    # Call the function
+    digest = get_manifest_digest(image_str)
+
+    # Verify the result
+    assert digest == expected_digest
diff --git a/tests/test_signatures.py b/tests/test_signatures.py
index 5f7a846..b744db8 100644
--- a/tests/test_signatures.py
+++ b/tests/test_signatures.py
@@ -108,7 +108,8 @@ def test_get_log_index_from_missing_log_index():
 def test_upgrade_container_image_if_already_up_to_date(mocker):
     mocker.patch(
-        "dangerzone.updater.signatures.is_update_available", return_value=(False, None)
+        "dangerzone.updater.registry.is_new_remote_image_available",
+        return_value=(False, None),
     )
     with pytest.raises(errors.ImageAlreadyUpToDate):
         upgrade_container_image(
@@ -118,7 +119,7 @@ def test_upgrade_container_without_signatures(mocker):
     mocker.patch(
-        "dangerzone.updater.signatures.is_update_available",
+        "dangerzone.updater.registry.is_new_remote_image_available",
         return_value=(True, "sha256:123456"),
     )
     mocker.patch("dangerzone.updater.signatures.get_remote_signatures", return_value=[])
@@ -139,7 +140,7 @@ def test_upgrade_container_lower_log_index(mocker):
         signatures_path=VALID_SIGNATURES_PATH,
     )
     mocker.patch(
-        "dangerzone.updater.signatures.is_update_available",
+        "dangerzone.updater.registry.is_new_remote_image_available",
         return_value=(
             True,
             image_digest,
@@ -208,6 +209,19 @@ def test_get_remote_signatures_cosign_error(mocker, fp: FakeProcess):
         get_remote_signatures(image, digest)
 
 
+def test_get_remote_signatures_cosign_subprocess_error(mocker, fp: FakeProcess):
+    image = "ghcr.io/freedomofpress/dangerzone/dangerzone"
+    digest = "123456"
+    mocker.patch("dangerzone.updater.cosign.ensure_installed", return_value=True)
+    fp.register_subprocess(
+        ["cosign", "download", "signature", f"{image}@sha256:{digest}"],
+        returncode=1,
+        stderr="Error: no signatures associated",
+    )
+    with pytest.raises(errors.NoRemoteSignatures):
+        get_remote_signatures(image, digest)
+
+
 def test_store_signatures_with_different_digests(
     valid_signature, signature_other_digest, mocker, tmp_path
 ):
@@ -239,7 +253,7 @@
     # Verify that the signatures file was not created
     assert not (signatures_path / f"{image_digest}.json").exists()
 
-    # Verify that the log index file was not updated
+    # Verify that the log index file was not created
     assert not (signatures_path / "last_log_index").exists()
 
 
@@ -309,6 +323,23 @@ def test_stores_signatures_updates_last_log_index(valid_signature, mocker, tmp_p
test_stores_signatures_updates_last_log_index(): pass + # Mock the signatures path + signatures_path = tmp_path / "signatures" + signatures_path.mkdir() + mocker.patch("dangerzone.updater.signatures.SIGNATURES_PATH", signatures_path) + + # Mock get_log_index_from_signatures + mocker.patch( + "dangerzone.updater.signatures.get_log_index_from_signatures", + return_value=100, + ) + + # Mock get_last_log_index + mocker.patch( + "dangerzone.updater.signatures.get_last_log_index", + return_value=50, + ) + def test_get_file_digest(): # Mock the signatures path @@ -335,31 +366,79 @@ def test_get_file_digest(): assert f.read() == "100" -def test_is_update_available_when_no_local_image(mocker): +def test_is_update_available_when_remote_image_available(mocker): """ - Test that is_update_available returns True when no local image is - currently present. + Test that is_update_available returns True when a new image is available + and all checks pass """ - # Mock container_image_exists to return False + # Mock is_new_remote_image_available to return True and digest mocker.patch( - "dangerzone.container_utils.get_local_image_digest", - side_effect=dzerrors.ImageNotPresentException, + "dangerzone.updater.registry.is_new_remote_image_available", + return_value=(True, RANDOM_DIGEST), ) - # Mock get_manifest_digest to return a digest + # Mock check_signatures_and_logindex to not raise any exceptions mocker.patch( - "dangerzone.updater.registry.get_manifest_digest", - return_value=RANDOM_DIGEST, + "dangerzone.updater.signatures.check_signatures_and_logindex", + return_value=[{"some": "signature"}], ) # Call is_update_available - update_available, digest = is_update_available("ghcr.io/freedomofpress/dangerzone") + update_available, digest = is_update_available( + "ghcr.io/freedomofpress/dangerzone", "test.pub" + ) # Verify the result assert update_available is True assert digest == RANDOM_DIGEST +def test_is_update_available_when_no_remote_image(mocker): + """ + Test that is_update_available returns False when no remote image is available + """ + # Mock is_new_remote_image_available to return False + mocker.patch( + "dangerzone.updater.registry.is_new_remote_image_available", + return_value=(False, None), + ) + + # Call is_update_available + update_available, digest = is_update_available( + "ghcr.io/freedomofpress/dangerzone", "test.pub" + ) + + # Verify the result + assert update_available is False + assert digest is None + + +def test_is_update_available_with_invalid_log_index(mocker): + """ + Test that is_update_available returns False when the log index is invalid + """ + # Mock is_new_remote_image_available to return True + mocker.patch( + "dangerzone.updater.registry.is_new_remote_image_available", + return_value=(True, RANDOM_DIGEST), + ) + + # Mock check_signatures_and_logindex to raise InvalidLogIndex + mocker.patch( + "dangerzone.updater.signatures.check_signatures_and_logindex", + side_effect=errors.InvalidLogIndex("Invalid log index"), + ) + + # Call is_update_available + update_available, digest = is_update_available( + "ghcr.io/freedomofpress/dangerzone", "test.pub" + ) + + # Verify the result + assert update_available is False + assert digest is None + + def test_verify_signature(valid_signature): """Test that verify_signature raises an error when the payload digest doesn't match.""" verify_signature( @@ -383,3 +462,7 @@ def test_verify_signature_tempered(tempered_signature): def test_verify_signatures_empty_list(): with pytest.raises(errors.SignatureVerificationError): verify_signatures([], 
"1234", TEST_PUBKEY_PATH) + + +def test_verify_signatures_not_0(): + pass From 8d6e5cb8b85164c3b697fccb17353db4fa290469 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 26 Feb 2025 19:22:33 +0100 Subject: [PATCH 08/24] Provide an `is_update_available` function This function does all the needed checks before returning `True`, making it a good external API. Under the hood, the registry now has an `is_new_remote_image_available` which is just for checking the presence of a new image, but doesn't do any verirications on it, and there is also a new `check_signatures_and_logindex` that ensures that these two are valid. --- dangerzone/updater/registry.py | 23 +++++++++++++ dangerzone/updater/signatures.py | 57 ++++++++++++++++++-------------- 2 files changed, 56 insertions(+), 24 deletions(-) diff --git a/dangerzone/updater/registry.py b/dangerzone/updater/registry.py index fe57364..0caee26 100644 --- a/dangerzone/updater/registry.py +++ b/dangerzone/updater/registry.py @@ -5,6 +5,8 @@ from typing import Dict, Optional, Tuple import requests +from .. import container_utils as runtime +from .. import errors as dzerrors from . import errors, log __all__ = [ @@ -114,3 +116,24 @@ def get_manifest_digest( tag_manifest_content = get_manifest(image_str).content return sha256(tag_manifest_content).hexdigest() + + +def is_new_remote_image_available(image_str: str) -> Tuple[bool, str]: + """ + Check if a new remote image is available on the registry. + """ + remote_digest = get_manifest_digest(image_str) + image = parse_image_location(image_str) + if image.digest: + local_digest = image.digest + else: + try: + local_digest = runtime.get_local_image_digest(image_str) + except dzerrors.ImageNotPresentException: + log.debug("No local image found") + return True, remote_digest + + log.debug("Remote digest: %s", remote_digest) + log.debug("Local digest: %s", local_digest) + + return (remote_digest != local_digest, remote_digest) diff --git a/dangerzone/updater/signatures.py b/dangerzone/updater/signatures.py index d452967..0c58b8c 100644 --- a/dangerzone/updater/signatures.py +++ b/dangerzone/updater/signatures.py @@ -136,19 +136,39 @@ class Signature: return full_digest.replace("sha256:", "") -def is_update_available(image: str) -> Tuple[bool, Optional[str]]: - remote_digest = registry.get_manifest_digest(image) +def is_update_available(image_str: str, pubkey: str) -> Tuple[bool, Optional[str]]: + """ + Check if a new image is available, doing all the necessary checks ensuring it + would be safe to upgrade. 
+ """ + new_image_available, remote_digest = registry.is_new_remote_image_available( + image_str + ) + if not new_image_available: + return False, None + try: - local_digest = runtime.get_local_image_digest(image) - except dzerrors.ImageNotPresentException: - log.debug("No local image found") + check_signatures_and_logindex(image_str, remote_digest, pubkey) return True, remote_digest - log.debug("Remote digest: %s", remote_digest) - log.debug("Local digest: %s", local_digest) - has_update = remote_digest != local_digest - if has_update: - return True, remote_digest - return False, None + except errors.InvalidLogIndex: + return False, None + + +def check_signatures_and_logindex( + image_str: str, remote_digest: str, pubkey: str +) -> list[Dict]: + signatures = get_remote_signatures(image_str, remote_digest) + verify_signatures(signatures, remote_digest, pubkey) + + incoming_log_index = get_log_index_from_signatures(signatures) + last_log_index = get_last_log_index() + + if incoming_log_index < last_log_index: + raise errors.InvalidLogIndex( + f"The incoming log index ({incoming_log_index}) is " + f"lower than the last known log index ({last_log_index})" + ) + return signatures def verify_signatures( @@ -461,22 +481,11 @@ def prepare_airgapped_archive(image_name: str, destination: str) -> None: def upgrade_container_image(image: str, manifest_digest: str, pubkey: str) -> str: """Verify and upgrade the image to the latest, if signed.""" - update_available, _ = is_update_available(image) + update_available, remote_digest = registry.is_new_remote_image_available(image) if not update_available: raise errors.ImageAlreadyUpToDate("The image is already up to date") - signatures = get_remote_signatures(image, manifest_digest) - verify_signatures(signatures, manifest_digest, pubkey) - - # Only upgrade if the log index is higher than the last known one - incoming_log_index = get_log_index_from_signatures(signatures) - last_log_index = get_last_log_index() - - if incoming_log_index < last_log_index: - raise errors.InvalidLogIndex( - "Trying to upgrade to an image with a lower log index" - ) - + signatures = check_signatures_and_logindex(image, remote_digest, pubkey) runtime.container_pull(image, manifest_digest) # Store the signatures just now to avoid storing them unverified From d91a09a2990e7a4d93433b274abc7553772cb882 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Thu, 27 Feb 2025 17:55:00 +0100 Subject: [PATCH 09/24] Split updater GUI code from the code checking for release updates The code making the actual requests and checks now lives in the `updater.releases` module. The code should be easier to read and to reason about. Tests have been updated to reflect this. 
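For illustration, here is a minimal sketch of how a caller outside the Qt
thread is expected to drive the new `updater.releases` module, using the
functions introduced below (`should_check_for_releases`, `check_for_updates`,
and the `NeedUserInput` error); constructing `Settings()` directly and the
`print` calls are illustrative assumptions, not part of this patch:

    from dangerzone.settings import Settings
    from dangerzone.updater import errors, releases

    settings = Settings()  # the GUI passes in its own Settings instance

    try:
        if releases.should_check_for_releases(settings):
            report = releases.check_for_updates(settings)
            if report.error:
                print(f"Update check failed: {report.error}")
            elif not report.empty():
                print(f"New version available: {report.version}")
    except errors.NeedUserInput:
        # The GUI reacts to this by prompting the user to enable checks.
        pass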
--- dangerzone/gui/__init__.py | 11 +- dangerzone/gui/main_window.py | 2 +- dangerzone/gui/updater.py | 270 +++++---------------------------- dangerzone/settings.py | 1 + dangerzone/updater/errors.py | 6 + dangerzone/updater/releases.py | 191 +++++++++++++++++++++++ dangerzone/util.py | 1 + tests/gui/test_main_window.py | 18 ++- tests/gui/test_updater.py | 129 ++++++++-------- 9 files changed, 320 insertions(+), 309 deletions(-) create mode 100644 dangerzone/updater/releases.py diff --git a/dangerzone/gui/__init__.py b/dangerzone/gui/__init__.py index 5a4c26c..248954c 100644 --- a/dangerzone/gui/__init__.py +++ b/dangerzone/gui/__init__.py @@ -24,6 +24,8 @@ from ..document import Document from ..isolation_provider.container import Container from ..isolation_provider.dummy import Dummy from ..isolation_provider.qubes import Qubes, is_qubes_native_conversion +from ..updater import errors as updater_errors +from ..updater import releases from ..util import get_resource_path, get_version from .logic import DangerzoneGui from .main_window import MainWindow @@ -161,16 +163,15 @@ def gui_main(dummy_conversion: bool, filenames: Optional[List[str]]) -> bool: window.register_update_handler(updater.finished) log.debug("Consulting updater settings before checking for updates") - if updater.should_check_for_updates(): + should_check = updater.should_check_for_updates() + + if should_check: log.debug("Checking for updates") updater.start() else: log.debug("Will not check for updates, based on updater settings") - # Ensure the status of the toggle updates checkbox is updated, after the user is - # prompted to enable updates. - window.toggle_updates_action.setChecked(bool(updater.check)) - + window.toggle_updates_action.setChecked(should_check) if filenames: open_files(filenames) diff --git a/dangerzone/gui/main_window.py b/dangerzone/gui/main_window.py index fd86817..a17cb1a 100644 --- a/dangerzone/gui/main_window.py +++ b/dangerzone/gui/main_window.py @@ -26,9 +26,9 @@ else: from .. import errors from ..document import SAFE_EXTENSION, Document from ..isolation_provider.qubes import is_qubes_native_conversion +from ..updater.releases import UpdateReport from ..util import format_exception, get_resource_path, get_version from .logic import Alert, CollapsibleBox, DangerzoneGui, UpdateDialog -from .updater import UpdateReport log = logging.getLogger(__name__) diff --git a/dangerzone/gui/updater.py b/dangerzone/gui/updater.py index 396de21..f4dae0b 100644 --- a/dangerzone/gui/updater.py +++ b/dangerzone/gui/updater.py @@ -1,15 +1,7 @@ -"""A module that contains the logic for checking for updates.""" - -import json import logging -import platform -import sys -import time import typing from typing import Optional -from packaging import version - if typing.TYPE_CHECKING: from PySide2 import QtCore, QtWidgets else: @@ -18,36 +10,33 @@ else: except ImportError: from PySide2 import QtCore, QtWidgets -# XXX implict import for "markdown" module required for Cx_Freeze to build on Windows -# See https://github.com/freedomofpress/dangerzone/issues/501 -import html.parser # noqa: F401 - -import markdown -import requests - -from ..util import get_version +from ..updater import errors, releases from .logic import Alert, DangerzoneGui log = logging.getLogger(__name__) - MSG_CONFIRM_UPDATE_CHECKS = """\ -
<p>
-    Do you want Dangerzone to automatically check for updates?
-</p>
+<p>
+    Do you want Dangerzone to automatically check for updates and apply them?
+</p>
 
-<p>
-    If you accept, Dangerzone will check the latest releases page
-in github.com on startup. Otherwise it will make no network requests and
-won't inform you about new releases.
-</p>
+<p>
+    If you accept, Dangerzone will check for updates of the sandbox and apply them
+automatically. This will ensure that you always have the latest version of the sandbox,
+which is critical for the software to operate securely.
+</p>
+
+<p>
+    Sandbox updates may include security patches and bug fixes, but won't include new features.
+</p>
+
+<p>
+    Additionally, Dangerzone will check the latest releases page
+in github.com, and inform you about new releases.
+
+Otherwise it will make no network requests and won't inform you about new releases.
+</p>
 
 <p>
 If you prefer another way of getting notified about new releases, we suggest adding
 to your RSS reader our
-Mastodon feed. For more information
-about updates, check
-this webpage.
+Dangerzone News feed.
 </p>
""" -UPDATE_CHECK_COOLDOWN_SECS = 60 * 60 * 12 # Check for updates at most every 12 hours. - class UpdateCheckPrompt(Alert): """The prompt that asks the users if they want to enable update checks.""" @@ -55,7 +44,7 @@ class UpdateCheckPrompt(Alert): x_pressed = False def closeEvent(self, event: QtCore.QEvent) -> None: - """Detect when a user has pressed "X" in the title bar. + """Detect when a user has pressed "X" in the title bar (to close the dialog). This function is called when a user clicks on "X" in the title bar. We want to differentiate between the user clicking on "Cancel" and clicking on "X", since @@ -76,72 +65,32 @@ class UpdateCheckPrompt(Alert): return buttons_layout -class UpdateReport: - """A report for an update check.""" - - def __init__( - self, - version: Optional[str] = None, - changelog: Optional[str] = None, - error: Optional[str] = None, - ): - self.version = version - self.changelog = changelog - self.error = error - - def empty(self) -> bool: - return self.version is None and self.changelog is None and self.error is None - - class UpdaterThread(QtCore.QThread): """Check asynchronously for Dangerzone updates. - The Updater class is mainly responsible for the following: - - 1. Asking the user if they want to enable update checks or not. - 2. Determining when it's the right time to check for updates. - 3. Hitting the GitHub releases API and learning about updates. + The Updater class is mainly responsible for + asking the user if they want to enable update checks or not. Since checking for updates is a task that may take some time, we perform it - asynchronously, in a Qt thread. This thread then triggers a signal, and informs - whoever has connected to it. + asynchronously, in a Qt thread. + + When finished, this thread triggers a signal with the results. """ - finished = QtCore.Signal(UpdateReport) - - GH_RELEASE_URL = ( - "https://api.github.com/repos/freedomofpress/dangerzone/releases/latest" - ) - REQ_TIMEOUT = 15 + finished = QtCore.Signal(releases.UpdateReport) def __init__(self, dangerzone: DangerzoneGui): super().__init__() self.dangerzone = dangerzone - ########### - # Helpers for updater settings - # - # These helpers make it easy to retrieve specific updater-related settings, as well - # as save the settings file, only when necessary. - - @property - def check(self) -> Optional[bool]: - return self.dangerzone.settings.get("updater_check") - - @check.setter - def check(self, val: bool) -> None: - self.dangerzone.settings.set("updater_check", val, autosave=True) - def prompt_for_checks(self) -> Optional[bool]: """Ask the user if they want to be informed about Dangerzone updates.""" log.debug("Prompting the user for update checks") - # FIXME: Handle the case where a user clicks on "X", instead of explicitly - # making a choice. We should probably ask them again on the next run. prompt = UpdateCheckPrompt( self.dangerzone, message=MSG_CONFIRM_UPDATE_CHECKS, - ok_text="Check Automatically", - cancel_text="Don't Check", + ok_text="Enable sandbox updates", + cancel_text="Do not make any requests", ) check = prompt.launch() if not check and prompt.x_pressed: @@ -149,167 +98,18 @@ class UpdaterThread(QtCore.QThread): return bool(check) def should_check_for_updates(self) -> bool: - """Determine if we can check for updates based on settings and user prefs. - - Note that this method only checks if the user has expressed an interest for - learning about new updates, and not whether we should actually make an update - check. Those two things are distinct, actually. 
For example: - - * A user may have expressed that they want to learn about new updates. - * A previous update check may have found out that there's a new version out. - * Thus we will always show to the user the cached info about the new version, - and won't make a new update check. - """ - log.debug("Checking platform type") - # TODO: Disable updates for Homebrew installations. - if platform.system() == "Linux" and not getattr(sys, "dangerzone_dev", False): - log.debug("Running on Linux, disabling updates") - if not self.check: # if not overidden by user - self.check = False - return False - - log.debug("Checking if first run of Dangerzone") - if self.dangerzone.settings.get("updater_last_check") is None: - log.debug("Dangerzone is running for the first time, updates are stalled") - self.dangerzone.settings.set("updater_last_check", 0, autosave=True) - return False - - log.debug("Checking if user has already expressed their preference") - if self.check is None: - log.debug("User has not been asked yet for update checks") - self.check = self.prompt_for_checks() - return bool(self.check) - elif not self.check: - log.debug("User has expressed that they don't want to check for updates") - return False - - return True - - def can_update(self, cur_version: str, latest_version: str) -> bool: - if version.parse(cur_version) == version.parse(latest_version): - return False - elif version.parse(cur_version) > version.parse(latest_version): - # FIXME: This is a sanity check, but we should improve its wording. - raise Exception("Received version is older than the latest version") - else: - return True - - def _get_now_timestamp(self) -> int: - return int(time.time()) - - def _should_postpone_update_check(self) -> bool: - """Consult and update cooldown timer. - - If the previous check happened before the cooldown period expires, do not check - again. - """ - current_time = self._get_now_timestamp() - last_check = self.dangerzone.settings.get("updater_last_check") - if current_time < last_check + UPDATE_CHECK_COOLDOWN_SECS: - log.debug("Cooling down update checks") - return True - else: - return False - - def get_latest_info(self) -> UpdateReport: - """Get the latest release info from GitHub. - - Also, render the changelog from Markdown format to HTML, so that we can show it - to the users. - """ try: - res = requests.get(self.GH_RELEASE_URL, timeout=self.REQ_TIMEOUT) - except Exception as e: - raise RuntimeError( - f"Encountered an exception while checking {self.GH_RELEASE_URL}: {e}" + should_check: Optional[bool] = releases.should_check_for_releases( + self.dangerzone.settings ) - - if res.status_code != 200: - raise RuntimeError( - f"Encountered an HTTP {res.status_code} error while checking" - f" {self.GH_RELEASE_URL}" - ) - - try: - info = res.json() - except json.JSONDecodeError: - raise ValueError(f"Received a non-JSON response from {self.GH_RELEASE_URL}") - - try: - version = info["tag_name"].lstrip("v") - changelog = markdown.markdown(info["body"]) - except KeyError: - raise ValueError( - f"Missing required fields in JSON response from {self.GH_RELEASE_URL}" - ) - - return UpdateReport(version=version, changelog=changelog) - - # XXX: This happens in parallel with other tasks. DO NOT alter global state! - def _check_for_updates(self) -> UpdateReport: - """Check for updates locally and remotely. - - Check for updates in two places: - - 1. In our settings, in case we have cached the latest version/changelog from a - previous run. - 2. In GitHub, by hitting the latest releases API. 
- """ - log.debug("Checking for Dangerzone updates") - latest_version = self.dangerzone.settings.get("updater_latest_version") - if version.parse(get_version()) < version.parse(latest_version): - log.debug("Determined that there is an update due to cached results") - return UpdateReport( - version=latest_version, - changelog=self.dangerzone.settings.get("updater_latest_changelog"), - ) - - # If the previous check happened before the cooldown period expires, do not - # check again. Else, bump the last check timestamp, before making the actual - # check. This is to ensure that even failed update checks respect the cooldown - # period. - if self._should_postpone_update_check(): - return UpdateReport() - else: - self.dangerzone.settings.set( - "updater_last_check", self._get_now_timestamp(), autosave=True - ) - - log.debug("Checking the latest GitHub release") - report = self.get_latest_info() - log.debug(f"Latest version in GitHub is {report.version}") - if report.version and self.can_update(latest_version, report.version): - log.debug( - f"Determined that there is an update due to a new GitHub version:" - f" {latest_version} < {report.version}" - ) - return report - - log.debug("No need to update") - return UpdateReport() - - ################## - # Logic for running update checks asynchronously - - def check_for_updates(self) -> UpdateReport: - """Check for updates and return a report with the findings: - - There are three scenarios when we check for updates, and each scenario returns a - slightly different answer: - - 1. No new updates: Return an empty update report. - 2. Updates are available: Return an update report with the latest version and - changelog, in HTML format. - 3. Update check failed: Return an update report that holds just the error - message. - """ - try: - res = self._check_for_updates() - except Exception as e: - log.exception("Encountered an error while checking for upgrades") - res = UpdateReport(error=str(e)) - - return res + except errors.NeedUserInput: + should_check = self.prompt_for_checks() + if should_check is not None: + self.dangerzone.settings.set( + "updater_check", should_check, autosave=True + ) + return bool(should_check) def run(self) -> None: - self.finished.emit(self.check_for_updates()) + has_updates = releases.check_for_updates(self.dangerzone.settings) + self.finished.emit(has_updates) diff --git a/dangerzone/settings.py b/dangerzone/settings.py index 0e30896..619b458 100644 --- a/dangerzone/settings.py +++ b/dangerzone/settings.py @@ -1,6 +1,7 @@ import json import logging import os +import platform from pathlib import Path from typing import TYPE_CHECKING, Any, Dict diff --git a/dangerzone/updater/errors.py b/dangerzone/updater/errors.py index 6b75c0e..e7f20b9 100644 --- a/dangerzone/updater/errors.py +++ b/dangerzone/updater/errors.py @@ -56,3 +56,9 @@ class CosignNotInstalledError(SignatureError): class InvalidLogIndex(SignatureError): pass + + +class NeedUserInput(UpdaterError): + """The user has not yet been prompted to know if they want to check for updates.""" + + pass diff --git a/dangerzone/updater/releases.py b/dangerzone/updater/releases.py new file mode 100644 index 0000000..82cb05f --- /dev/null +++ b/dangerzone/updater/releases.py @@ -0,0 +1,191 @@ +import json +import platform +import sys +import time +from typing import Optional + +import markdown +import requests +from packaging import version + +from .. import util +from ..settings import Settings +from . import errors, log + +# Check for updates at most every 12 hours. 
+UPDATE_CHECK_COOLDOWN_SECS = 60 * 60 * 12 + +GH_RELEASE_URL = ( + "https://api.github.com/repos/freedomofpress/dangerzone/releases/latest" +) +REQ_TIMEOUT = 15 + + +class UpdateReport: + """A report for an update check.""" + + def __init__( + self, + version: Optional[str] = None, + changelog: Optional[str] = None, + error: Optional[str] = None, + ): + self.version = version + self.changelog = changelog + self.error = error + + def empty(self) -> bool: + return self.version is None and self.changelog is None and self.error is None + + +def _get_now_timestamp() -> int: + return int(time.time()) + + +def _should_postpone_update_check(settings) -> bool: + """Consult and update cooldown timer. + + If the previous check happened before the cooldown period expires, do not check + again. + """ + current_time = _get_now_timestamp() + last_check = settings.get("updater_last_check") + if current_time < last_check + UPDATE_CHECK_COOLDOWN_SECS: + log.debug("Cooling down update checks") + return True + else: + return False + + +def ensure_sane_update(cur_version: str, latest_version: str) -> bool: + if version.parse(cur_version) == version.parse(latest_version): + return False + elif version.parse(cur_version) > version.parse(latest_version): + # FIXME: This is a sanity check, but we should improve its wording. + raise Exception("Received version is older than the latest version") + else: + return True + + +def fetch_release_info() -> UpdateReport: + """Get the latest release info from GitHub. + + Also, render the changelog from Markdown format to HTML, so that we can show it + to the users. + """ + try: + res = requests.get(GH_RELEASE_URL, timeout=REQ_TIMEOUT) + except Exception as e: + raise RuntimeError( + f"Encountered an exception while checking {GH_RELEASE_URL}: {e}" + ) + + if res.status_code != 200: + raise RuntimeError( + f"Encountered an HTTP {res.status_code} error while checking" + f" {GH_RELEASE_URL}" + ) + + try: + info = res.json() + except json.JSONDecodeError: + raise ValueError(f"Received a non-JSON response from {GH_RELEASE_URL}") + + try: + version = info["tag_name"].lstrip("v") + changelog = markdown.markdown(info["body"]) + except KeyError: + raise ValueError( + f"Missing required fields in JSON response from {GH_RELEASE_URL}" + ) + + return UpdateReport(version=version, changelog=changelog) + + +def should_check_for_releases(settings: Settings) -> bool: + """Determine if we can check for release updates based on settings and user prefs. + + Note that this method only checks if the user has expressed an interest for + learning about new updates, and not whether we should actually make an update + check. Those two things are distinct, actually. For example: + + * A user may have expressed that they want to learn about new updates. + * A previous update check may have found out that there's a new version out. + * Thus we will always show to the user the cached info about the new version, + and won't make a new update check. + """ + check = settings.get("updater_check") + + log.debug("Checking platform type") + # TODO: Disable updates for Homebrew installations. 
+ if platform.system() == "Linux" and not getattr(sys, "dangerzone_dev", False): + log.debug("Running on Linux, disabling updates") + if not check: # if not overidden by user + settings.set("updater_check", False, autosave=True) + return False + + log.debug("Checking if first run of Dangerzone") + if settings.get("updater_last_check") is None: + log.debug("Dangerzone is running for the first time, updates are stalled") + settings.set("updater_last_check", 0, autosave=True) + return False + + log.debug("Checking if user has already expressed their preference") + if check is None: + log.debug("User has not been asked yet for update checks") + raise errors.NeedUserInput() + elif not check: + log.debug("User has expressed that they don't want to check for updates") + return False + + return True + + +def check_for_updates(settings) -> UpdateReport: + """Check for updates locally and remotely. + + Check for updates (locally and remotely) and return a report with the findings: + + There are three scenarios when we check for updates, and each scenario returns a + slightly different answer: + + 1. No new updates: Return an empty update report. + 2. Updates are available: Return an update report with the latest version and + changelog, in HTML format. + 3. Update check failed: Return an update report that holds just the error + message. + """ + try: + log.debug("Checking for Dangerzone updates") + latest_version = settings.get("updater_latest_version") + if version.parse(util.get_version()) < version.parse(latest_version): + log.debug("Determined that there is an update due to cached results") + return UpdateReport( + version=latest_version, + changelog=settings.get("updater_latest_changelog"), + ) + + # If the previous check happened before the cooldown period expires, do not + # check again. Else, bump the last check timestamp, before making the actual + # check. This is to ensure that even failed update checks respect the cooldown + # period. 
+ if _should_postpone_update_check(settings): + return UpdateReport() + else: + settings.set("updater_last_check", _get_now_timestamp(), autosave=True) + + log.debug("Checking the latest GitHub release") + report = fetch_release_info() + log.debug(f"Latest version in GitHub is {report.version}") + if report.version and ensure_sane_update(latest_version, report.version): + log.debug( + f"Determined that there is an update due to a new GitHub version:" + f" {latest_version} < {report.version}" + ) + return report + + log.debug("No need to update") + return UpdateReport() + except Exception as e: + log.exception("Encountered an error while checking for upgrades") + return UpdateReport(error=str(e)) diff --git a/dangerzone/util.py b/dangerzone/util.py index 6cae643..212652e 100644 --- a/dangerzone/util.py +++ b/dangerzone/util.py @@ -69,6 +69,7 @@ def get_tessdata_dir() -> Path: def get_version() -> str: + """Returns the Dangerzone version string.""" try: with get_resource_path("version.txt").open() as f: version = f.read().strip() diff --git a/tests/gui/test_main_window.py b/tests/gui/test_main_window.py index e4fc127..75a11d3 100644 --- a/tests/gui/test_main_window.py +++ b/tests/gui/test_main_window.py @@ -24,9 +24,10 @@ from dangerzone.gui.main_window import ( QtGui, WaitingWidgetContainer, ) -from dangerzone.gui.updater import UpdateReport, UpdaterThread +from dangerzone.gui.updater import UpdaterThread from dangerzone.isolation_provider.container import Container from dangerzone.isolation_provider.dummy import Dummy +from dangerzone.updater import releases from .test_updater import assert_report_equal, default_updater_settings @@ -147,7 +148,7 @@ def test_no_update( # Check that the callback function gets an empty report. handle_updates_spy.assert_called_once() - assert_report_equal(handle_updates_spy.call_args.args[0], UpdateReport()) + assert_report_equal(handle_updates_spy.call_args.args[0], releases.UpdateReport()) # Check that the menu entries remain exactly the same. menu_actions_after = window.hamburger_button.menu().actions() @@ -171,8 +172,8 @@ def test_update_detected( # Make requests.get().json() return the following dictionary. mock_upstream_info = {"tag_name": "99.9.9", "body": "changelog"} - mocker.patch("dangerzone.gui.updater.requests.get") - requests_mock = updater_module.requests.get + mocker.patch("dangerzone.updater.releases.requests.get") + requests_mock = releases.requests.get requests_mock().status_code = 200 # type: ignore [call-arg] requests_mock().json.return_value = mock_upstream_info # type: ignore [attr-defined, call-arg] @@ -191,7 +192,8 @@ def test_update_detected( # Check that the callback function gets an update report. handle_updates_spy.assert_called_once() assert_report_equal( - handle_updates_spy.call_args.args[0], UpdateReport("99.9.9", "
<p>changelog</p>
") + handle_updates_spy.call_args.args[0], + releases.UpdateReport("99.9.9", "
<p>changelog</p>
"), ) # Check that the settings have been updated properly. @@ -281,9 +283,9 @@ def test_update_error( qt_updater.dangerzone.settings.set("updater_last_check", 0) qt_updater.dangerzone.settings.set("updater_errors", 0) - # Make requests.get() return an errorthe following dictionary. - mocker.patch("dangerzone.gui.updater.requests.get") - requests_mock = updater_module.requests.get + # Make requests.get() return an error + mocker.patch("dangerzone.updater.releases.requests.get") + requests_mock = releases.requests.get requests_mock.side_effect = Exception("failed") # type: ignore [attr-defined] window = MainWindow(qt_updater.dangerzone) diff --git a/tests/gui/test_updater.py b/tests/gui/test_updater.py index 9bf544a..114dac5 100644 --- a/tests/gui/test_updater.py +++ b/tests/gui/test_updater.py @@ -12,7 +12,9 @@ from pytestqt.qtbot import QtBot from dangerzone import settings from dangerzone.gui import updater as updater_module -from dangerzone.gui.updater import UpdateReport, UpdaterThread +from dangerzone.gui.updater import UpdaterThread +from dangerzone.updater import releases +from dangerzone.updater.releases import UpdateReport from dangerzone.util import get_version from ..test_settings import default_settings_0_4_1, save_settings @@ -116,6 +118,7 @@ def test_linux_no_check(updater: UpdaterThread, monkeypatch: MonkeyPatch) -> Non def test_user_prompts(updater: UpdaterThread, mocker: MockerFixture) -> None: """Test prompting users to ask them if they want to enable update checks.""" + settings = updater.dangerzone.settings # First run # # When Dangerzone runs for the first time, users should not be asked to enable @@ -124,7 +127,7 @@ def test_user_prompts(updater: UpdaterThread, mocker: MockerFixture) -> None: expected_settings["updater_check"] = None expected_settings["updater_last_check"] = 0 assert updater.should_check_for_updates() is False - assert updater.dangerzone.settings.get_updater_settings() == expected_settings + assert settings.get_updater_settings() == expected_settings # Second run # @@ -138,14 +141,14 @@ def test_user_prompts(updater: UpdaterThread, mocker: MockerFixture) -> None: prompt_mock().launch.return_value = False # type: ignore [attr-defined] expected_settings["updater_check"] = False assert updater.should_check_for_updates() is False - assert updater.dangerzone.settings.get_updater_settings() == expected_settings + assert settings.get_updater_settings() == expected_settings # Reset the "updater_check" field and check enabling update checks. - updater.dangerzone.settings.set("updater_check", None) + settings.set("updater_check", None) prompt_mock().launch.return_value = True # type: ignore [attr-defined] expected_settings["updater_check"] = True assert updater.should_check_for_updates() is True - assert updater.dangerzone.settings.get_updater_settings() == expected_settings + assert settings.get_updater_settings() == expected_settings # Third run # @@ -153,7 +156,7 @@ def test_user_prompts(updater: UpdaterThread, mocker: MockerFixture) -> None: # checks. 
prompt_mock().side_effect = RuntimeError("Should not be called") # type: ignore [attr-defined] for check in [True, False]: - updater.dangerzone.settings.set("updater_check", check) + settings.set("updater_check", check) assert updater.should_check_for_updates() == check @@ -161,43 +164,44 @@ def test_update_checks( updater: UpdaterThread, monkeypatch: MonkeyPatch, mocker: MockerFixture ) -> None: """Test version update checks.""" + settings = updater.dangerzone.settings # This dictionary will simulate GitHub's response. mock_upstream_info = {"tag_name": f"v{get_version()}", "body": "changelog"} # Make requests.get().json() return the above dictionary. - mocker.patch("dangerzone.gui.updater.requests.get") - requests_mock = updater_module.requests.get + mocker.patch("dangerzone.updater.releases.requests.get") + requests_mock = updater_module.releases.requests.get requests_mock().status_code = 200 # type: ignore [call-arg] requests_mock().json.return_value = mock_upstream_info # type: ignore [attr-defined, call-arg] # Always assume that we can perform multiple update checks in a row. - monkeypatch.setattr(updater, "_should_postpone_update_check", lambda: False) + mocker.patch( + "dangerzone.updater.releases._should_postpone_update_check", return_value=False + ) # Test 1 - Check that the current version triggers no updates. - report = updater.check_for_updates() + report = releases.check_for_updates(settings) assert_report_equal(report, UpdateReport()) # Test 2 - Check that a newer version triggers updates, and that the changelog is # rendered from Markdown to HTML. mock_upstream_info["tag_name"] = "v99.9.9" - report = updater.check_for_updates() + report = releases.check_for_updates(settings) assert_report_equal( report, UpdateReport(version="99.9.9", changelog="
<p>changelog</p>
") ) # Test 3 - Check that HTTP errors are converted to error reports. requests_mock.side_effect = Exception("failed") # type: ignore [attr-defined] - report = updater.check_for_updates() - error_msg = ( - f"Encountered an exception while checking {updater.GH_RELEASE_URL}: failed" - ) + report = releases.check_for_updates(settings) + error_msg = f"Encountered an exception while checking {updater_module.releases.GH_RELEASE_URL}: failed" assert_report_equal(report, UpdateReport(error=error_msg)) # Test 4 - Check that cached version/changelog info do not trigger an update check. - updater.dangerzone.settings.set("updater_latest_version", "99.9.9") - updater.dangerzone.settings.set("updater_latest_changelog", "
<p>changelog</p>
") + settings.set("updater_latest_version", "99.9.9") + settings.set("updater_latest_changelog", "
<p>changelog</p>
") - report = updater.check_for_updates() + report = releases.check_for_updates(settings) assert_report_equal( report, UpdateReport(version="99.9.9", changelog="
<p>changelog</p>
") ) @@ -205,14 +209,16 @@ def test_update_checks( def test_update_checks_cooldown(updater: UpdaterThread, mocker: MockerFixture) -> None: """Make sure Dangerzone only checks for updates every X hours""" - updater.dangerzone.settings.set("updater_check", True) - updater.dangerzone.settings.set("updater_last_check", 0) + settings = updater.dangerzone.settings + + settings.set("updater_check", True) + settings.set("updater_last_check", 0) # Mock some functions before the tests start - cooldown_spy = mocker.spy(updater, "_should_postpone_update_check") - timestamp_mock = mocker.patch.object(updater, "_get_now_timestamp") - mocker.patch("dangerzone.gui.updater.requests.get") - requests_mock = updater_module.requests.get + cooldown_spy = mocker.spy(updater_module.releases, "_should_postpone_update_check") + timestamp_mock = mocker.patch.object(updater_module.releases, "_get_now_timestamp") + mocker.patch("dangerzone.updater.releases.requests.get") + requests_mock = updater_module.releases.requests.get # # Make requests.get().json() return the version info that we want. mock_upstream_info = {"tag_name": "99.9.9", "body": "changelog"} @@ -225,9 +231,9 @@ def test_update_checks_cooldown(updater: UpdaterThread, mocker: MockerFixture) - curtime = int(time.time()) timestamp_mock.return_value = curtime - report = updater.check_for_updates() + report = releases.check_for_updates(settings) assert cooldown_spy.spy_return is False - assert updater.dangerzone.settings.get("updater_last_check") == curtime + assert settings.get("updater_last_check") == curtime assert_report_equal(report, UpdateReport("99.9.9", "
<p>changelog</p>
")) # Test 2: Advance the current time by 1 second, and ensure that no update will take @@ -236,41 +242,39 @@ def test_update_checks_cooldown(updater: UpdaterThread, mocker: MockerFixture) - curtime += 1 timestamp_mock.return_value = curtime requests_mock.side_effect = Exception("failed") # type: ignore [attr-defined] - updater.dangerzone.settings.set("updater_latest_version", get_version()) - updater.dangerzone.settings.set("updater_latest_changelog", None) + settings.set("updater_latest_version", get_version()) + settings.set("updater_latest_changelog", None) - report = updater.check_for_updates() + report = releases.check_for_updates(settings) assert cooldown_spy.spy_return is True - assert updater.dangerzone.settings.get("updater_last_check") == curtime - 1 # type: ignore [unreachable] + assert settings.get("updater_last_check") == curtime - 1 # type: ignore [unreachable] assert_report_equal(report, UpdateReport()) # Test 3: Advance the current time by seconds. Ensure that # Dangerzone checks for updates again, and the last check timestamp gets bumped. - curtime += updater_module.UPDATE_CHECK_COOLDOWN_SECS + curtime += updater_module.releases.UPDATE_CHECK_COOLDOWN_SECS timestamp_mock.return_value = curtime requests_mock.side_effect = None - report = updater.check_for_updates() + report = releases.check_for_updates(settings) assert cooldown_spy.spy_return is False - assert updater.dangerzone.settings.get("updater_last_check") == curtime + assert settings.get("updater_last_check") == curtime assert_report_equal(report, UpdateReport("99.9.9", "
<p>changelog</p>
")) # Test 4: Make Dangerzone check for updates again, but this time, it should # encounter an error while doing so. In that case, the last check timestamp # should be bumped, so that subsequent checks don't take place. - updater.dangerzone.settings.set("updater_latest_version", get_version()) - updater.dangerzone.settings.set("updater_latest_changelog", None) + settings.set("updater_latest_version", get_version()) + settings.set("updater_latest_changelog", None) - curtime += updater_module.UPDATE_CHECK_COOLDOWN_SECS + curtime += updater_module.releases.UPDATE_CHECK_COOLDOWN_SECS timestamp_mock.return_value = curtime requests_mock.side_effect = Exception("failed") - report = updater.check_for_updates() + report = releases.check_for_updates(settings) assert cooldown_spy.spy_return is False - assert updater.dangerzone.settings.get("updater_last_check") == curtime - error_msg = ( - f"Encountered an exception while checking {updater.GH_RELEASE_URL}: failed" - ) + assert settings.get("updater_last_check") == curtime + error_msg = f"Encountered an exception while checking {updater_module.releases.GH_RELEASE_URL}: failed" assert_report_equal(report, UpdateReport(error=error_msg)) @@ -278,16 +282,17 @@ def test_update_errors( updater: UpdaterThread, monkeypatch: MonkeyPatch, mocker: MockerFixture ) -> None: """Test update check errors.""" + settings = updater.dangerzone.settings # Mock requests.get(). - mocker.patch("dangerzone.gui.updater.requests.get") - requests_mock = updater_module.requests.get + mocker.patch("dangerzone.updater.releases.requests.get") + requests_mock = releases.requests.get # Always assume that we can perform multiple update checks in a row. - monkeypatch.setattr(updater, "_should_postpone_update_check", lambda: False) + monkeypatch.setattr(releases, "_should_postpone_update_check", lambda: False) # Test 1 - Check that request exceptions are being detected as errors. 
requests_mock.side_effect = Exception("bad url") # type: ignore [attr-defined] - report = updater.check_for_updates() + report = releases.check_for_updates(settings) assert report.error is not None assert "bad url" in report.error assert "Encountered an exception" in report.error @@ -298,7 +303,7 @@ def test_update_errors( requests_mock.return_value = MockResponse500() # type: ignore [attr-defined] requests_mock.side_effect = None # type: ignore [attr-defined] - report = updater.check_for_updates() + report = releases.check_for_updates(settings) assert report.error is not None assert "Encountered an HTTP 500 error" in report.error @@ -310,7 +315,7 @@ def test_update_errors( return json.loads("bad json") requests_mock.return_value = MockResponseBadJSON() # type: ignore [attr-defined] - report = updater.check_for_updates() + report = releases.check_for_updates(settings) assert report.error is not None assert "Received a non-JSON response" in report.error @@ -322,7 +327,7 @@ def test_update_errors( return {} requests_mock.return_value = MockResponseEmpty() # type: ignore [attr-defined] - report = updater.check_for_updates() + report = releases.check_for_updates(settings) assert report.error is not None assert "Missing required fields in JSON" in report.error @@ -334,7 +339,7 @@ def test_update_errors( return {"tag_name": "vbad_version", "body": "changelog"} requests_mock.return_value = MockResponseBadVersion() # type: ignore [attr-defined] - report = updater.check_for_updates() + report = releases.check_for_updates(settings) assert report.error is not None assert "Invalid version" in report.error @@ -346,7 +351,7 @@ def test_update_errors( return {"tag_name": "v99.9.9", "body": ["bad", "markdown"]} requests_mock.return_value = MockResponseBadMarkdown() # type: ignore [attr-defined] - report = updater.check_for_updates() + report = releases.check_for_updates(settings) assert report.error is not None # Test 7 - Check that a valid response passes. @@ -357,7 +362,7 @@ def test_update_errors( return {"tag_name": "v99.9.9", "body": "changelog"} requests_mock.return_value = MockResponseValid() # type: ignore [attr-defined] - report = updater.check_for_updates() + report = releases.check_for_updates(settings) assert_report_equal(report, UpdateReport("99.9.9", "
<p>changelog</p>
")) @@ -367,24 +372,28 @@ def test_update_check_prompt( ) -> None: """Test that the prompt to enable update checks works properly.""" # Force Dangerzone to check immediately for updates - qt_updater.dangerzone.settings.set("updater_last_check", 0) + settings = qt_updater.dangerzone.settings + settings.set("updater_last_check", 0) # Test 1 - Check that on the second run of Dangerzone, the user is prompted to # choose if they want to enable update checks. def check_button_labels() -> None: dialog = qt_updater.dangerzone.app.activeWindow() - assert dialog.ok_button.text() == "Check Automatically" # type: ignore [attr-defined] - assert dialog.cancel_button.text() == "Don't Check" # type: ignore [attr-defined] + assert dialog.ok_button.text() == "Enable sandbox updates" # type: ignore [attr-defined] + assert dialog.cancel_button.text() == "Do not make any requests" # type: ignore [attr-defined] dialog.ok_button.click() # type: ignore [attr-defined] QtCore.QTimer.singleShot(500, check_button_labels) + mocker.patch( + "dangerzone.updater.releases._should_postpone_update_check", return_value=False + ) res = qt_updater.should_check_for_updates() assert res is True # Test 2 - Check that when the user chooses to enable update checks, we # store that decision in the settings. - qt_updater.check = None + settings.set("updater_check", None, autosave=True) def click_ok() -> None: dialog = qt_updater.dangerzone.app.activeWindow() @@ -394,11 +403,11 @@ def test_update_check_prompt( res = qt_updater.should_check_for_updates() assert res is True - assert qt_updater.check is True + assert settings.get("updater_check") is True # Test 3 - Same as the previous test, but check that clicking on cancel stores the # opposite decision. - qt_updater.check = None # type: ignore [unreachable] + settings.set("updater_check", None) # type: ignore [unreachable] def click_cancel() -> None: dialog = qt_updater.dangerzone.app.activeWindow() @@ -408,11 +417,11 @@ def test_update_check_prompt( res = qt_updater.should_check_for_updates() assert res is False - assert qt_updater.check is False + assert settings.get("updater_check") is False # Test 4 - Same as the previous test, but check that clicking on "X" does not store # any decision. - qt_updater.check = None + settings.set("updater_check", None, autosave=True) def click_x() -> None: dialog = qt_updater.dangerzone.app.activeWindow() @@ -422,4 +431,4 @@ def test_update_check_prompt( res = qt_updater.should_check_for_updates() assert res is False - assert qt_updater.check is None + assert settings.get("updater_check") is None From 61c8f2a6add12c99e87af9f7d0390e41803dacdc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Sat, 1 Mar 2025 15:50:32 +0100 Subject: [PATCH 10/24] Replace the `updater_check` setting by `updater_check_all` This new setting triggers the same user prompts, but the actual meaning of it differs, since users will now be accepting to upgrade the container image rather than just checking for new releases. Changing the name of the setting will trigger this prompt for all users, effectively ensuring they want their image to be automatically upgraded. 
--- dangerzone/gui/main_window.py | 4 ++-- dangerzone/gui/updater.py | 2 +- dangerzone/settings.py | 2 +- dangerzone/updater/releases.py | 4 ++-- docs/developer/updates.md | 38 ++++++---------------------------- tests/gui/test_main_window.py | 16 +++++++------- tests/gui/test_updater.py | 28 ++++++++++++------------- 7 files changed, 34 insertions(+), 60 deletions(-) diff --git a/dangerzone/gui/main_window.py b/dangerzone/gui/main_window.py index a17cb1a..5924aae 100644 --- a/dangerzone/gui/main_window.py +++ b/dangerzone/gui/main_window.py @@ -163,7 +163,7 @@ class MainWindow(QtWidgets.QMainWindow): self.toggle_updates_action.triggered.connect(self.toggle_updates_triggered) self.toggle_updates_action.setCheckable(True) self.toggle_updates_action.setChecked( - bool(self.dangerzone.settings.get("updater_check")) + bool(self.dangerzone.settings.get("updater_check_all")) ) # Add the "Exit" action @@ -281,7 +281,7 @@ class MainWindow(QtWidgets.QMainWindow): def toggle_updates_triggered(self) -> None: """Change the underlying update check settings based on the user's choice.""" check = self.toggle_updates_action.isChecked() - self.dangerzone.settings.set("updater_check", check) + self.dangerzone.settings.set("updater_check_all", check) self.dangerzone.settings.save() def handle_docker_desktop_version_check( diff --git a/dangerzone/gui/updater.py b/dangerzone/gui/updater.py index f4dae0b..ece2619 100644 --- a/dangerzone/gui/updater.py +++ b/dangerzone/gui/updater.py @@ -106,7 +106,7 @@ class UpdaterThread(QtCore.QThread): should_check = self.prompt_for_checks() if should_check is not None: self.dangerzone.settings.set( - "updater_check", should_check, autosave=True + "updater_check_all", should_check, autosave=True ) return bool(should_check) diff --git a/dangerzone/settings.py b/dangerzone/settings.py index 619b458..8613ce7 100644 --- a/dangerzone/settings.py +++ b/dangerzone/settings.py @@ -33,7 +33,7 @@ class Settings: "open": True, "open_app": None, "safe_extension": SAFE_EXTENSION, - "updater_check": None, + "updater_check_all": None, "updater_last_check": None, # last check in UNIX epoch (secs since 1970) # FIXME: How to invalidate those if they change upstream? "updater_latest_version": get_version(), diff --git a/dangerzone/updater/releases.py b/dangerzone/updater/releases.py index 82cb05f..7fc842b 100644 --- a/dangerzone/updater/releases.py +++ b/dangerzone/updater/releases.py @@ -114,14 +114,14 @@ def should_check_for_releases(settings: Settings) -> bool: * Thus we will always show to the user the cached info about the new version, and won't make a new update check. """ - check = settings.get("updater_check") + check = settings.get("updater_check_all") log.debug("Checking platform type") # TODO: Disable updates for Homebrew installations. if platform.system() == "Linux" and not getattr(sys, "dangerzone_dev", False): log.debug("Running on Linux, disabling updates") if not check: # if not overidden by user - settings.set("updater_check", False, autosave=True) + settings.set("updater_check_all", False, autosave=True) return False log.debug("Checking if first run of Dangerzone") diff --git a/docs/developer/updates.md b/docs/developer/updates.md index e5e1197..27cbc60 100644 --- a/docs/developer/updates.md +++ b/docs/developer/updates.md @@ -11,8 +11,7 @@ https://github.com/freedomofpress/dangerzone/wiki/Updates ## Design overview -This feature introduces a hamburger icon that will be visible across almost all -of the Dangerzone windows. This will be used to notify the users about updates. 
+A hamburger icon is visible across almost all of the Dangerzone windows, and is used to notify the users when there are new releases. ### First run @@ -21,8 +20,7 @@ _We detect it's the first time Dangerzone runs because the Add the following keys in our `settings.json` file. -* `"updater_check": None`: Whether to check for updates or not. `None` means - that the user has not decided yet, and is the default. +* `"updater_check_all": True`: Whether or not to check and apply independent container updates and check for new releases. * `"updater_last_check": None`: The last time we checked for updates (in seconds from Unix epoch). None means that we haven't checked yet. * `"updater_latest_version": "0.4.2"`: The latest version that the Dangerzone @@ -32,43 +30,19 @@ Add the following keys in our `settings.json` file. * `"updater_errors: 0`: The number of update check errors that we have encountered in a row. -Note: - -* If on Linux, make `"updater_check": False`, since we normally have - other update channels for these platforms. +Previously, `"updater_check"` was used to determine if we should check for new releases, and has been replaced by `"updater_check_all"` when adding support for independent container updates. ### Second run _We detect it's the second time Dangerzone runs because -`settings["updater_check"] is not None and settings["updater_last_check"] is +`settings["updater_check_all"] is not None and settings["updater_last_check"] is None`._ -Before starting up the main window, show this window: - -* Title: Dangerzone Updater -* Body: - - > Do you want Dangerzone to automatically check for updates? - > - > If you accept, Dangerzone will check the latest releases page in github.com - > on startup. Otherwise it will make no network requests and won't inform you - > about new releases. - > - > If you prefer another way of getting notified about new releases, we suggest adding - > to your RSS reader our [Mastodon feed](https://fosstodon.org/@dangerzone.rss). For more information - > about updates, check [this webpage](https://github.com/freedomofpress/dangerzone/wiki/Updates). - -* Buttons: - - Check Automaticaly: Store `settings["updater_check"] = True` - - Don't Check: Store `settings["updater_check"] = False` - -Note: -* Users will be able to change their choice from the hamburger menu, which will - contain an entry called "Check for updates", that users can check and uncheck. +Before starting up the main window, the user is prompted if they want to enable update checks. ### Subsequent runs -_We perform the following only if `settings["updater_check"] == True`._ +_We perform the following only if `settings["updater_check_all"] == True`._ 1. Spawn a new thread so that we don't block the main window. 2. Check if we have cached information about a release (version and changelog). 
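Step 2 above is the cached-result short-circuit implemented by
`releases.check_for_updates` earlier in this series; a condensed sketch of
just that branch (the `settings` object and the helper name are stand-ins):

    from typing import Optional, Tuple

    from packaging import version

    def cached_release(settings, current_version: str) -> Optional[Tuple[str, str]]:
        # A previous check may already have recorded a newer version; if so,
        # reuse the cached version/changelog pair instead of hitting the
        # GitHub API again.
        latest = settings.get("updater_latest_version")
        if version.parse(current_version) < version.parse(latest):
            return latest, settings.get("updater_latest_changelog")
        return None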
diff --git a/tests/gui/test_main_window.py b/tests/gui/test_main_window.py index 75a11d3..4422af1 100644 --- a/tests/gui/test_main_window.py +++ b/tests/gui/test_main_window.py @@ -97,7 +97,7 @@ def test_default_menu( updater: UpdaterThread, ) -> None: """Check that the default menu entries are in order.""" - updater.dangerzone.settings.set("updater_check", True) + updater.dangerzone.settings.set("updater_check_all", True) window = MainWindow(updater.dangerzone) menu_actions = window.hamburger_button.menu().actions() @@ -115,7 +115,7 @@ def test_default_menu( toggle_updates_action.trigger() assert not toggle_updates_action.isChecked() - assert updater.dangerzone.settings.get("updater_check") is False + assert updater.dangerzone.settings.get("updater_check_all") is False def test_no_update( @@ -128,12 +128,12 @@ def test_no_update( # Check that when no update is detected, e.g., due to update cooldown, an empty # report is received that does not affect the menu entries. curtime = int(time.time()) - updater.dangerzone.settings.set("updater_check", True) + updater.dangerzone.settings.set("updater_check_all", True) updater.dangerzone.settings.set("updater_errors", 9) updater.dangerzone.settings.set("updater_last_check", curtime) expected_settings = default_updater_settings() - expected_settings["updater_check"] = True + expected_settings["updater_check_all"] = True expected_settings["updater_errors"] = 0 # errors must be cleared expected_settings["updater_last_check"] = curtime @@ -166,7 +166,7 @@ def test_update_detected( ) -> None: """Test that a newly detected version leads to a notification to the user.""" - qt_updater.dangerzone.settings.set("updater_check", True) + qt_updater.dangerzone.settings.set("updater_check_all", True) qt_updater.dangerzone.settings.set("updater_last_check", 0) qt_updater.dangerzone.settings.set("updater_errors", 9) @@ -198,7 +198,7 @@ def test_update_detected( # Check that the settings have been updated properly. expected_settings = default_updater_settings() - expected_settings["updater_check"] = True + expected_settings["updater_check_all"] = True expected_settings["updater_last_check"] = qt_updater.dangerzone.settings.get( "updater_last_check" ) @@ -279,7 +279,7 @@ def test_update_error( ) -> None: """Test that an error during an update check leads to a notification to the user.""" # Test 1 - Check that the first error does not notify the user. - qt_updater.dangerzone.settings.set("updater_check", True) + qt_updater.dangerzone.settings.set("updater_check_all", True) qt_updater.dangerzone.settings.set("updater_last_check", 0) qt_updater.dangerzone.settings.set("updater_errors", 0) @@ -306,7 +306,7 @@ def test_update_error( # Check that the settings have been updated properly. 
expected_settings = default_updater_settings() - expected_settings["updater_check"] = True + expected_settings["updater_check_all"] = True expected_settings["updater_last_check"] = qt_updater.dangerzone.settings.get( "updater_last_check" ) diff --git a/tests/gui/test_updater.py b/tests/gui/test_updater.py index 114dac5..a0ee4ad 100644 --- a/tests/gui/test_updater.py +++ b/tests/gui/test_updater.py @@ -106,7 +106,7 @@ def test_post_0_4_2_settings( def test_linux_no_check(updater: UpdaterThread, monkeypatch: MonkeyPatch) -> None: """Ensure that Dangerzone on Linux does not make any update check.""" expected_settings = default_updater_settings() - expected_settings["updater_check"] = False + expected_settings["updater_check_all"] = False expected_settings["updater_last_check"] = None # XXX: Simulate Dangerzone installed via package manager. @@ -124,7 +124,7 @@ def test_user_prompts(updater: UpdaterThread, mocker: MockerFixture) -> None: # When Dangerzone runs for the first time, users should not be asked to enable # updates. expected_settings = default_updater_settings() - expected_settings["updater_check"] = None + expected_settings["updater_check_all"] = None expected_settings["updater_last_check"] = 0 assert updater.should_check_for_updates() is False assert settings.get_updater_settings() == expected_settings @@ -139,14 +139,14 @@ def test_user_prompts(updater: UpdaterThread, mocker: MockerFixture) -> None: # Check disabling update checks. prompt_mock().launch.return_value = False # type: ignore [attr-defined] - expected_settings["updater_check"] = False + expected_settings["updater_check_all"] = False assert updater.should_check_for_updates() is False assert settings.get_updater_settings() == expected_settings - # Reset the "updater_check" field and check enabling update checks. - settings.set("updater_check", None) + # Reset the "updater_check_all" field and check enabling update checks. + settings.set("updater_check_all", None) prompt_mock().launch.return_value = True # type: ignore [attr-defined] - expected_settings["updater_check"] = True + expected_settings["updater_check_all"] = True assert updater.should_check_for_updates() is True assert settings.get_updater_settings() == expected_settings @@ -156,7 +156,7 @@ def test_user_prompts(updater: UpdaterThread, mocker: MockerFixture) -> None: # checks. prompt_mock().side_effect = RuntimeError("Should not be called") # type: ignore [attr-defined] for check in [True, False]: - settings.set("updater_check", check) + settings.set("updater_check_all", check) assert updater.should_check_for_updates() == check @@ -211,7 +211,7 @@ def test_update_checks_cooldown(updater: UpdaterThread, mocker: MockerFixture) - """Make sure Dangerzone only checks for updates every X hours""" settings = updater.dangerzone.settings - settings.set("updater_check", True) + settings.set("updater_check_all", True) settings.set("updater_last_check", 0) # Mock some functions before the tests start @@ -393,7 +393,7 @@ def test_update_check_prompt( # Test 2 - Check that when the user chooses to enable update checks, we # store that decision in the settings. 
- settings.set("updater_check", None, autosave=True) + settings.set("updater_check_all", None, autosave=True) def click_ok() -> None: dialog = qt_updater.dangerzone.app.activeWindow() @@ -403,11 +403,11 @@ def test_update_check_prompt( res = qt_updater.should_check_for_updates() assert res is True - assert settings.get("updater_check") is True + assert settings.get("updater_check_all") is True # Test 3 - Same as the previous test, but check that clicking on cancel stores the # opposite decision. - settings.set("updater_check", None) # type: ignore [unreachable] + settings.set("updater_check_all", None) # type: ignore [unreachable] def click_cancel() -> None: dialog = qt_updater.dangerzone.app.activeWindow() @@ -417,11 +417,11 @@ def test_update_check_prompt( res = qt_updater.should_check_for_updates() assert res is False - assert settings.get("updater_check") is False + assert settings.get("updater_check_all") is False # Test 4 - Same as the previous test, but check that clicking on "X" does not store # any decision. - settings.set("updater_check", None, autosave=True) + settings.set("updater_check_all", None, autosave=True) def click_x() -> None: dialog = qt_updater.dangerzone.app.activeWindow() @@ -431,4 +431,4 @@ def test_update_check_prompt( res = qt_updater.should_check_for_updates() assert res is False - assert settings.get("updater_check") is None + assert settings.get("updater_check_all") is None From bdceee53d036671377da185c8c21833773e011e5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Mon, 3 Mar 2025 12:58:27 +0100 Subject: [PATCH 11/24] Add a `dangerzone-image store-signature` CLI command This can be useful when signatures are missing from the system, for an already present image, and can be used as a way to fix user issues. --- dangerzone/updater/cli.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/dangerzone/updater/cli.py b/dangerzone/updater/cli.py index ede57d8..ee0915b 100644 --- a/dangerzone/updater/cli.py +++ b/dangerzone/updater/cli.py @@ -42,6 +42,17 @@ def upgrade(image: str, pubkey: str) -> None: raise click.Abort() +@main.command() +@click.argument("image", default=DEFAULT_IMAGE_NAME) +@click.option("--pubkey", default=signatures.DEFAULT_PUBKEY_LOCATION) +def store_signatures(image: str, pubkey: str) -> None: + manifest_digest = registry.get_manifest_digest(image) + sigs = signatures.get_remote_signatures(image, manifest_digest) + signatures.verify_signatures(sigs, manifest_digest, pubkey) + signatures.store_signatures(sigs, manifest_digest, pubkey, update_logindex=False) + click.echo(f"✅ Signatures has been verified and stored locally") + + @main.command() @click.argument("image_filename") @click.option("--pubkey", default=signatures.DEFAULT_PUBKEY_LOCATION) From 8d7e96555372f30b321ed4f75e8ac85b0296594d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Mon, 3 Mar 2025 12:59:36 +0100 Subject: [PATCH 12/24] Display the `{podman,docker} pull` progress when installing a new image The progressbars we see when using this same commands on the command line doesn't seem to be passed to the python process here, unfortunately. 
--- dangerzone/container_utils.py | 29 ++++++++++++----- dangerzone/gui/main_window.py | 32 ++++++++++++++++--- dangerzone/isolation_provider/base.py | 2 +- dangerzone/isolation_provider/container.py | 37 ++++++++++++++-------- dangerzone/updater/signatures.py | 15 ++++++--- 5 files changed, 84 insertions(+), 31 deletions(-) diff --git a/dangerzone/container_utils.py b/dangerzone/container_utils.py index 7c8f06d..26621fb 100644 --- a/dangerzone/container_utils.py +++ b/dangerzone/container_utils.py @@ -4,14 +4,16 @@ import platform import shutil import subprocess from pathlib import Path -from typing import List, Optional, Tuple +from typing import IO, Callable, List, Optional, Tuple from . import errors from .settings import Settings from .util import get_resource_path, get_subprocess_startupinfo OLD_CONTAINER_NAME = "dangerzone.rocks/dangerzone" -CONTAINER_NAME = "ghcr.io/freedomofpress/dangerzone/dangerzone" +CONTAINER_NAME = ( + "ghcr.io/almet/dangerzone/dangerzone" +) # FIXME: Change this to the correct container name log = logging.getLogger(__name__) @@ -228,16 +230,27 @@ def get_image_id_by_digest(digest: str) -> str: return process.stdout.decode().strip().split("\n")[0] -def container_pull(image: str, manifest_digest: str): +def container_pull(image: str, manifest_digest: str, callback: Callable): """Pull a container image from a registry.""" runtime = Runtime() cmd = [str(runtime.path), "pull", f"{image}@sha256:{manifest_digest}"] - try: - subprocess_run(cmd, check=True) - except subprocess.CalledProcessError as e: + process = subprocess.Popen( + cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + text=True, + bufsize=1, + universal_newlines=True, + ) + + for line in process.stdout: # type: ignore + callback(line) + + process.wait() + if process.returncode != 0: raise errors.ContainerPullException( - f"Could not pull the container image: {e}" - ) from e + f"Could not pull the container image: {process.returncode}" + ) def get_local_image_digest(image: str) -> str: diff --git a/dangerzone/gui/main_window.py b/dangerzone/gui/main_window.py index 5924aae..2383222 100644 --- a/dangerzone/gui/main_window.py +++ b/dangerzone/gui/main_window.py @@ -1,3 +1,4 @@ +import io import logging import os import platform @@ -5,22 +6,24 @@ import tempfile import typing from multiprocessing.pool import ThreadPool from pathlib import Path -from typing import List, Optional +from typing import Callable, List, Optional # FIXME: See https://github.com/freedomofpress/dangerzone/issues/320 for more details. if typing.TYPE_CHECKING: from PySide2 import QtCore, QtGui, QtSvg, QtWidgets from PySide2.QtCore import Qt + from PySide2.QtGui import QTextCursor from PySide2.QtWidgets import QAction, QTextEdit else: try: from PySide6 import QtCore, QtGui, QtSvg, QtWidgets from PySide6.QtCore import Qt - from PySide6.QtGui import QAction + from PySide6.QtGui import QAction, QTextCursor from PySide6.QtWidgets import QTextEdit except ImportError: from PySide2 import QtCore, QtGui, QtSvg, QtWidgets from PySide2.QtCore import Qt + from PySide2.QtGui import QTextCursor from PySide2.QtWidgets import QAction, QTextEdit from .. 
import errors @@ -436,15 +439,21 @@ class MainWindow(QtWidgets.QMainWindow): class InstallContainerThread(QtCore.QThread): finished = QtCore.Signal(str) + process_stdout = QtCore.Signal(str) - def __init__(self, dangerzone: DangerzoneGui) -> None: + def __init__( + self, dangerzone: DangerzoneGui, callback: Optional[Callable] = None + ) -> None: super(InstallContainerThread, self).__init__() self.dangerzone = dangerzone def run(self) -> None: error = None try: - installed = self.dangerzone.isolation_provider.install() + should_upgrade = self.dangerzone.settings.get("updater_check_all") + installed = self.dangerzone.isolation_provider.install( + should_upgrade=bool(should_upgrade), callback=self.process_stdout.emit + ) except Exception as e: log.error("Container installation problem") error = format_exception(e) @@ -479,11 +488,20 @@ class TracebackWidget(QTextEdit): # Enable copying self.setTextInteractionFlags(Qt.TextSelectableByMouse) + self.current_output = "" + def set_content(self, error: Optional[str] = None) -> None: if error: self.setPlainText(error) self.setVisible(True) + def process_output(self, line): + self.current_output += line + self.setText(self.current_output) + cursor = self.textCursor() + cursor.movePosition(QTextCursor.MoveOperation.End) + self.setTextCursor(cursor) + class WaitingWidgetContainer(WaitingWidget): # These are the possible states that the WaitingWidget can show. @@ -623,8 +641,14 @@ class WaitingWidgetContainer(WaitingWidget): "Installing the Dangerzone container image.

" "This might take a few minutes..." ) + self.traceback.setVisible(True) + self.install_container_t = InstallContainerThread(self.dangerzone) self.install_container_t.finished.connect(self.installation_finished) + + self.install_container_t.process_stdout.connect( + self.traceback.process_output + ) self.install_container_t.start() diff --git a/dangerzone/isolation_provider/base.py b/dangerzone/isolation_provider/base.py index 0a12be1..27d1c09 100644 --- a/dangerzone/isolation_provider/base.py +++ b/dangerzone/isolation_provider/base.py @@ -95,7 +95,7 @@ class IsolationProvider(ABC): return self.debug or getattr(sys, "dangerzone_dev", False) @abstractmethod - def install(self) -> bool: + def install(self, should_upgrade: bool, callback: Callable) -> bool: pass def convert( diff --git a/dangerzone/isolation_provider/container.py b/dangerzone/isolation_provider/container.py index a5bb6b7..50bf64d 100644 --- a/dangerzone/isolation_provider/container.py +++ b/dangerzone/isolation_provider/container.py @@ -3,7 +3,7 @@ import os import platform import shlex import subprocess -from typing import List, Tuple +from typing import Callable, List, Tuple from .. import container_utils, errors, updater from ..container_utils import Runtime @@ -94,27 +94,38 @@ class Container(IsolationProvider): return security_args @staticmethod - def install() -> bool: + def install( + should_upgrade: bool, callback: Callable, last_try: bool = False + ) -> bool: """Check if an update is available and install it if necessary.""" - # XXX Do this only if users have opted in to auto-updates - if False: # Comment this for now, just as an exemple of this can be implemented - # # Load the image tarball into the container runtime. + if not should_upgrade: + log.debug("Skipping container upgrade check as requested by the settings") + else: update_available, image_digest = updater.is_update_available( - container_utils.CONTAINER_NAME + container_utils.CONTAINER_NAME, + updater.DEFAULT_PUBKEY_LOCATION, ) if update_available and image_digest: + log.debug("Upgrading container image to %s", image_digest) updater.upgrade_container_image( container_utils.CONTAINER_NAME, image_digest, updater.DEFAULT_PUBKEY_LOCATION, + callback=callback, ) - for tag in old_tags: - tag = container_utils.CONTAINER_NAME + ":" + tag - container_utils.delete_image_tag(tag) - - updater.verify_local_image( - container_utils.CONTAINER_NAME, updater.DEFAULT_PUBKEY_LOCATION - ) + else: + log.debug("No update available for the container") + try: + updater.verify_local_image( + container_utils.CONTAINER_NAME, updater.DEFAULT_PUBKEY_LOCATION + ) + except errors.ImageNotPresentException: + if last_try: + raise + log.debug("Container image not found, trying to install it.") + return Container.install( + should_upgrade=should_upgrade, callback=callback, last_try=True + ) return True diff --git a/dangerzone/updater/signatures.py b/dangerzone/updater/signatures.py index 0c58b8c..5312c2c 100644 --- a/dangerzone/updater/signatures.py +++ b/dangerzone/updater/signatures.py @@ -9,7 +9,7 @@ from hashlib import sha256 from io import BytesIO from pathlib import Path from tempfile import NamedTemporaryFile, TemporaryDirectory -from typing import Dict, List, Optional, Tuple +from typing import Callable, Dict, List, Optional, Tuple from .. import container_utils as runtime from .. 
import errors as dzerrors @@ -370,7 +370,9 @@ def load_and_verify_signatures( return signatures -def store_signatures(signatures: list[Dict], image_digest: str, pubkey: str) -> None: +def store_signatures( + signatures: list[Dict], image_digest: str, pubkey: str, update_logindex: bool = True +) -> None: """ Store signatures locally in the SIGNATURE_PATH folder, like this: @@ -415,7 +417,8 @@ def store_signatures(signatures: list[Dict], image_digest: str, pubkey: str) -> ) json.dump(signatures, f) - write_log_index(get_log_index_from_signatures(signatures)) + if update_logindex: + write_log_index(get_log_index_from_signatures(signatures)) def verify_local_image(image: str, pubkey: str) -> bool: @@ -479,14 +482,16 @@ def prepare_airgapped_archive(image_name: str, destination: str) -> None: archive.add(tmpdir, arcname=".") -def upgrade_container_image(image: str, manifest_digest: str, pubkey: str) -> str: +def upgrade_container_image( + image: str, manifest_digest: str, pubkey: str, callback: Callable +) -> str: """Verify and upgrade the image to the latest, if signed.""" update_available, remote_digest = registry.is_new_remote_image_available(image) if not update_available: raise errors.ImageAlreadyUpToDate("The image is already up to date") signatures = check_signatures_and_logindex(image, remote_digest, pubkey) - runtime.container_pull(image, manifest_digest) + runtime.container_pull(image, manifest_digest, callback=callback) # Store the signatures only now, to avoid storing them unverified store_signatures(signatures, manifest_digest, pubkey) From c9a6689271df03040ff832997005cd7e2956908b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Tue, 4 Mar 2025 10:09:27 +0100 Subject: [PATCH 13/24] Allow a different runtime on `dangerzone-image` commands. This can be done with the newly added `--runtime` flag, which needs to be passed to the first group, e.g.: ```bash dangerzone-image --runtime docker COMMAND ``` --- dangerzone/container_utils.py | 1 - dangerzone/updater/cli.py | 24 ++++++++++++++++++++---- dangerzone/updater/signatures.py | 19 +++++++++++-------- 3 files changed, 31 insertions(+), 13 deletions(-) diff --git a/dangerzone/container_utils.py b/dangerzone/container_utils.py index 26621fb..3a0fbbf 100644 --- a/dangerzone/container_utils.py +++ b/dangerzone/container_utils.py @@ -240,7 +240,6 @@ def container_pull(image: str, manifest_digest: str, callback: Callable): stderr=subprocess.STDOUT, text=True, bufsize=1, - universal_newlines=True, ) for line in process.stdout: # type: ignore callback(line) diff --git a/dangerzone/updater/cli.py b/dangerzone/updater/cli.py index ee0915b..463722e 100644 --- a/dangerzone/updater/cli.py +++ b/dangerzone/updater/cli.py @@ -1,9 +1,12 @@ #!/usr/bin/python +import functools import logging import click +from .. import container_utils +from ..container_utils import get_runtime_name from .
import attestations, errors, log, registry, signatures DEFAULT_REPOSITORY = "freedomofpress/dangerzone" @@ -13,7 +16,8 @@ DEFAULT_IMAGE_NAME = "ghcr.io/freedomofpress/dangerzone/dangerzone" @click.group() @click.option("--debug", is_flag=True) -def main(debug: bool) -> None: +@click.option("--runtime", default=get_runtime_name()) +def main(debug: bool, runtime: str) -> None: if debug: click.echo("Debug mode enabled") level = logging.DEBUG @@ -21,6 +25,10 @@ def main(debug: bool) -> None: level = logging.INFO logging.basicConfig(level=level) + if runtime != get_runtime_name(): + click.echo(f"Using container runtime: {runtime}") + container_utils.RUNTIME_NAME = runtime + @main.command() @click.argument("image", default=DEFAULT_IMAGE_NAME) @@ -28,8 +36,10 @@ def main(debug: bool) -> None: def upgrade(image: str, pubkey: str) -> None: """Upgrade the image to the latest signed version.""" manifest_digest = registry.get_manifest_digest(image) + try: - signatures.upgrade_container_image(image, manifest_digest, pubkey) + callback = functools.partial(click.echo, nl=False) + signatures.upgrade_container_image(image, manifest_digest, pubkey, callback) click.echo(f"✅ The local image {image} has been upgraded") click.echo(f"✅ The image has been signed with {pubkey}") click.echo("✅ Signatures have been verified and stored locally") @@ -56,17 +66,23 @@ def store_signatures(image: str, pubkey: str) -> None: @main.command() @click.argument("image_filename") @click.option("--pubkey", default=signatures.DEFAULT_PUBKEY_LOCATION) -def load_archive(image_filename: str, pubkey: str) -> None: +@click.option("--force", is_flag=True) +def load_archive(image_filename: str, pubkey: str, force: bool) -> None: """Upgrade the local image to the one in the archive.""" try: loaded_image = signatures.upgrade_container_image_airgapped( - image_filename, pubkey + image_filename, pubkey, bypass_logindex=force ) click.echo( f"✅ Installed image {image_filename} on the system as {loaded_image}" ) except errors.ImageAlreadyUpToDate as e: click.echo(f"✅ {e}") + except errors.InvalidLogIndex: + click.echo("❌ Trying to install an image older than the currently installed one") + raise click.Abort() + except Exception as e: + click.echo(f"❌ {e}") raise click.Abort() diff --git a/dangerzone/updater/signatures.py b/dangerzone/updater/signatures.py index 5312c2c..b620c1a 100644 --- a/dangerzone/updater/signatures.py +++ b/dangerzone/updater/signatures.py @@ -216,7 +216,9 @@ def _get_blob(tmpdir: str, digest: str) -> Path: return Path(tmpdir) / "blobs" / "sha256" / digest.replace("sha256:", "") -def upgrade_container_image_airgapped(container_tar: str, pubkey: str) -> str: +def upgrade_container_image_airgapped( + container_tar: str, pubkey: str, bypass_logindex: bool = False +) -> str: """ Verify the given archive against its self-contained signatures, then upgrade the image and retag it to the expected tag.
@@ -262,14 +264,15 @@ def upgrade_container_image_airgapped(container_tar: str, pubkey: str) -> str: image_name, signatures = convert_oci_images_signatures(json.load(f), tmpdir) log.info(f"Found image name: {image_name}") - # Ensure that we only upgrade if the log index is higher than the last known one - incoming_log_index = get_log_index_from_signatures(signatures) - last_log_index = get_last_log_index() + if not bypass_logindex: + # Ensure that we only upgrade if the log index is higher than the last known one + incoming_log_index = get_log_index_from_signatures(signatures) + last_log_index = get_last_log_index() - if incoming_log_index < last_log_index: - raise errors.InvalidLogIndex( - "The log index is not higher than the last known one" - ) + if incoming_log_index < last_log_index: + raise errors.InvalidLogIndex( + "The log index is not higher than the last known one" + ) image_digest = index_json["manifests"][0].get("digest").replace("sha256:", "") From 18331d19883ccc819c850b4ab7a1b51aec1dc186 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 16 Apr 2025 15:23:55 +0200 Subject: [PATCH 14/24] Make the `upgrade_container_image()` `callback` argument optional --- dangerzone/container_utils.py | 13 +++--- dangerzone/updater/signatures.py | 2 +- tests/test_signatures.py | 79 -------------------------------- 3 files changed, 8 insertions(+), 86 deletions(-) diff --git a/dangerzone/container_utils.py b/dangerzone/container_utils.py index 3a0fbbf..f0735d0 100644 --- a/dangerzone/container_utils.py +++ b/dangerzone/container_utils.py @@ -11,9 +11,7 @@ from .settings import Settings from .util import get_resource_path, get_subprocess_startupinfo OLD_CONTAINER_NAME = "dangerzone.rocks/dangerzone" -CONTAINER_NAME = ( - "ghcr.io/almet/dangerzone/dangerzone" -) # FIXME: Change this to the correct container name +CONTAINER_NAME = "ghcr.io/almet/dangerzone/dangerzone" # FIXME: Change this to the correct container name log = logging.getLogger(__name__) @@ -230,7 +228,9 @@ def get_image_id_by_digest(digest: str) -> str: return process.stdout.decode().strip().split("\n")[0] -def container_pull(image: str, manifest_digest: str, callback: Callable): +def container_pull( + image: str, manifest_digest: str, callback: Optional[Callable] = None +): """Pull a container image from a registry.""" runtime = Runtime() cmd = [str(runtime.path), "pull", f"{image}@sha256:{manifest_digest}"] @@ -242,8 +242,9 @@ def container_pull(image: str, manifest_digest: str, callback: Callable): bufsize=1, ) - for line in process.stdout: # type: ignore - callback(line) + if callback: + for line in process.stdout: # type: ignore + callback(line) process.wait() if process.returncode != 0: diff --git a/dangerzone/updater/signatures.py b/dangerzone/updater/signatures.py index b620c1a..5670443 100644 --- a/dangerzone/updater/signatures.py +++ b/dangerzone/updater/signatures.py @@ -486,7 +486,7 @@ def prepare_airgapped_archive(image_name: str, destination: str) -> None: def upgrade_container_image( - image: str, manifest_digest: str, pubkey: str, callback: Callable + image: str, manifest_digest: str, pubkey: str, callback: Optional[Callable] = None ) -> str: """Verify and upgrade the image to the latest, if signed.""" update_available, remote_digest = registry.is_new_remote_image_available(image) diff --git a/tests/test_signatures.py b/tests/test_signatures.py index b744db8..e7ca24a 100644 --- a/tests/test_signatures.py +++ b/tests/test_signatures.py @@ -278,85 +278,6 @@ def 
test_stores_signatures_updates_last_log_index(valid_signature, mocker, tmp_p return_value=100, ) - # Call store_signatures - with pytest.raises(errors.SignatureMismatch): - store_signatures(signatures, image_digest, TEST_PUBKEY_PATH) - ("dangerzone.updater.signatures.get_last_log_index",) - # Verify that the signatures file was not created - assert not (signatures_path / f"{image_digest}.json").exists() - - # Verify that the log index file was not updated - assert not (signatures_path / "last_log_index").exists() - - -def test_stores_signatures_updates_last_log_index(valid_signature, mocker, tmp_path): - """Test that store_signatures updates the last log index file.""" - signatures = [valid_signature] - # Extract the digest from the signature - image_digest = Signature(valid_signature).manifest_digest - signatures = [valid_signature, signature_other_digest] - breakpoint() - valid_signature, signature_other_digest, mocker, tmp_path - - """Test that store_signatures raises an error when a signature's digest doesn't match.""" - - image_digest = "sha256:123456" - - # Mock the signatures path - signatures_path = tmp_path / "signatures" - signatures_path.mkdir() - mocker.patch("dangerzone.updater.signatures.SIGNATURES_PATH", signatures_path) - - # Mock get_log_index_from_signatures - mocker.patch( - "dangerzone.updater.signatures.get_log_index_from_signatures", - return_value=100, - ) - - # Mock get_last_log_index - mocker.patch( - "dangerzone.updater.signatures.get_last_log_index", - return_value=50, - ) - - -def test_stores_signatures_updates_last_log_index(): - pass - - # Mock the signatures path - signatures_path = tmp_path / "signatures" - signatures_path.mkdir() - mocker.patch("dangerzone.updater.signatures.SIGNATURES_PATH", signatures_path) - - # Mock get_log_index_from_signatures - mocker.patch( - "dangerzone.updater.signatures.get_log_index_from_signatures", - return_value=100, - ) - - # Mock get_last_log_index - mocker.patch( - "dangerzone.updater.signatures.get_last_log_index", - return_value=50, - ) - - -def test_get_file_digest(): - # Mock the signatures path - signatures_path = tmp_path / "signatures" - signatures_path.mkdir() - mocker.patch("dangerzone.updater.signatures.SIGNATURES_PATH", signatures_path) - - # Create an existing last_log_index file with a lower value - with open(signatures_path / "last_log_index", "w") as f: - f.write("50") - - # Mock get_log_index_from_signatures to return a higher value - mocker.patch( - "dangerzone.updater.signatures.get_log_index_from_signatures", - return_value=100, - ) - # Call store_signatures store_signatures(signatures, image_digest, TEST_PUBKEY_PATH) From acd8717839c0980cd08f27d483b4fa1a29547fad Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Thu, 17 Apr 2025 17:16:10 +0200 Subject: [PATCH 15/24] Update container installation logic to allow in-place updates The isolation provider `install()` method is now passed a `should_upgrade` argument, which is read from the settings and represents the user decision about updates. The tests have been updated to reflect these changes. 
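To make the new calling convention concrete, here is a short sketch (the names come from the diffs below; the `dangerzone` object is assumed to expose `.settings` and `.isolation_provider`, as in the GUI and CLI code):

```python
def ensure_container_installed(dangerzone) -> bool:
    # The user's decision about updates is read from the settings...
    should_upgrade = bool(dangerzone.settings.get("updater_check_all"))
    # ...and passed to the isolation provider, together with an optional
    # callback that receives the pull output line by line.
    return dangerzone.isolation_provider.install(
        should_upgrade=should_upgrade,
        callback=print,  # the GUI passes process_stdout.emit instead
    )
```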
--- dangerzone/cli.py | 5 +- dangerzone/gui/main_window.py | 4 +- dangerzone/isolation_provider/container.py | 55 ++++++---- dangerzone/isolation_provider/dummy.py | 2 +- dangerzone/isolation_provider/qubes.py | 2 +- tests/conftest.py | 1 + tests/gui/test_updater.py | 6 +- tests/isolation_provider/test_container.py | 119 +++++++++++++-------- 8 files changed, 120 insertions(+), 74 deletions(-) diff --git a/dangerzone/cli.py b/dangerzone/cli.py index 1353995..d5699dd 100644 --- a/dangerzone/cli.py +++ b/dangerzone/cli.py @@ -71,8 +71,8 @@ def cli_main( ) -> None: setup_logging() display_banner() + settings = Settings() if set_container_runtime: - settings = Settings() if set_container_runtime == "default": settings.unset_custom_runtime() click.echo( @@ -117,7 +117,8 @@ def cli_main( sys.exit(1) # Ensure container is installed - dangerzone.isolation_provider.install() + should_upgrade = bool(settings.get("updater_check_all")) + dangerzone.isolation_provider.install(should_upgrade) # Convert the document print_header("Converting document to safe PDF") diff --git a/dangerzone/gui/main_window.py b/dangerzone/gui/main_window.py index 2383222..f78b6ad 100644 --- a/dangerzone/gui/main_window.py +++ b/dangerzone/gui/main_window.py @@ -450,9 +450,9 @@ class InstallContainerThread(QtCore.QThread): def run(self) -> None: error = None try: - should_upgrade = self.dangerzone.settings.get("updater_check_all") + should_upgrade = bool(self.dangerzone.settings.get("updater_check_all")) installed = self.dangerzone.isolation_provider.install( - should_upgrade=bool(should_upgrade), callback=self.process_stdout.emit + should_upgrade=should_upgrade, callback=self.process_stdout.emit ) except Exception as e: log.error("Container installation problem") diff --git a/dangerzone/isolation_provider/container.py b/dangerzone/isolation_provider/container.py index 50bf64d..f6f8e03 100644 --- a/dangerzone/isolation_provider/container.py +++ b/dangerzone/isolation_provider/container.py @@ -95,31 +95,49 @@ class Container(IsolationProvider): @staticmethod def install( - should_upgrade: bool, callback: Callable, last_try: bool = False + should_upgrade: bool, + callback: Optional[Callable] = sys.stdout.write, + last_try: bool = False, ) -> bool: - """Check if an update is available and install it if necessary.""" + """ + Install a (local or remote) container image. 
+ + Use the local `container.tar` image if: + + - No image is currently installed and `should_upgrade` is set to False + - No image is currently installed and no upgrades are available + + Upgrade to the last remote container image if: + + - An upgrade is available and `should_upgrade` is set to True + """ + + installed_tags = container_utils.list_image_tags() if not should_upgrade: log.debug("Skipping container upgrade check as requested by the settings") + if not installed_tags: + install_local_container_tar() else: - update_available, image_digest = updater.is_update_available( - container_utils.CONTAINER_NAME, - updater.DEFAULT_PUBKEY_LOCATION, + update_available, image_digest = is_update_available( + CONTAINER_NAME, + DEFAULT_PUBKEY_LOCATION, ) if update_available and image_digest: log.debug("Upgrading container image to %s", image_digest) - updater.upgrade_container_image( - container_utils.CONTAINER_NAME, + upgrade_container_image( + CONTAINER_NAME, image_digest, - updater.DEFAULT_PUBKEY_LOCATION, + DEFAULT_PUBKEY_LOCATION, callback=callback, ) else: - log.debug("No update available for the container") + log.debug("No update available for the container.") + if not installed_tags: + install_local_container_tar() try: - updater.verify_local_image( - container_utils.CONTAINER_NAME, updater.DEFAULT_PUBKEY_LOCATION - ) - except errors.ImageNotPresentException: + verify_local_image(CONTAINER_NAME) + except UpdaterError: + # delete_image() if last_try: raise log.debug("Container image not found, trying to install it.") @@ -210,13 +228,8 @@ class Container(IsolationProvider): ) -> subprocess.Popen: runtime = Runtime() - image_digest = container_utils.get_local_image_digest( - container_utils.CONTAINER_NAME - ) - updater.verify_local_image( - container_utils.CONTAINER_NAME, - updater.DEFAULT_PUBKEY_LOCATION, - ) + image_digest = container_utils.get_local_image_digest(CONTAINER_NAME) + verify_local_image(CONTAINER_NAME) security_args = self.get_runtime_security_args() debug_args = [] if self.debug: @@ -225,7 +238,7 @@ class Container(IsolationProvider): enable_stdin = ["-i"] set_name = ["--name", name] prevent_leakage_args = ["--rm"] - image_name = [container_utils.CONTAINER_NAME + "@sha256:" + image_digest] + image_name = [CONTAINER_NAME + "@sha256:" + image_digest] args = ( ["run"] + security_args diff --git a/dangerzone/isolation_provider/dummy.py b/dangerzone/isolation_provider/dummy.py index fac973f..a70a4ef 100644 --- a/dangerzone/isolation_provider/dummy.py +++ b/dangerzone/isolation_provider/dummy.py @@ -36,7 +36,7 @@ class Dummy(IsolationProvider): ) super().__init__() - def install(self) -> bool: + def install(self, *args, **kwargs) -> bool: return True @staticmethod diff --git a/dangerzone/isolation_provider/qubes.py b/dangerzone/isolation_provider/qubes.py index d46e6cc..defc4e8 100644 --- a/dangerzone/isolation_provider/qubes.py +++ b/dangerzone/isolation_provider/qubes.py @@ -18,7 +18,7 @@ log = logging.getLogger(__name__) class Qubes(IsolationProvider): """Uses a disposable qube for performing the conversion""" - def install(self) -> bool: + def install(self, *args, **kwargs) -> bool: return True @staticmethod diff --git a/tests/conftest.py b/tests/conftest.py index 64f1a44..1b8a9ca 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -9,6 +9,7 @@ import pytest from dangerzone.document import SAFE_EXTENSION from dangerzone.gui import Application +from dangerzone.isolation_provider import container sys.dangerzone_dev = True # type: ignore[attr-defined] diff --git 
a/tests/gui/test_updater.py b/tests/gui/test_updater.py index a0ee4ad..614b4cf 100644 --- a/tests/gui/test_updater.py +++ b/tests/gui/test_updater.py @@ -283,13 +283,13 @@ def test_update_errors( ) -> None: """Test update check errors.""" settings = updater.dangerzone.settings + # Always assume that we can perform multiple update checks in a row. + monkeypatch.setattr(releases, "_should_postpone_update_check", lambda _: False) + # Mock requests.get(). mocker.patch("dangerzone.updater.releases.requests.get") requests_mock = releases.requests.get - # Always assume that we can perform multiple update checks in a row. - monkeypatch.setattr(releases, "_should_postpone_update_check", lambda: False) - # Test 1 - Check that request exceptions are being detected as errors. requests_mock.side_effect = Exception("bad url") # type: ignore [attr-defined] report = releases.check_for_updates(settings) diff --git a/tests/isolation_provider/test_container.py b/tests/isolation_provider/test_container.py index 8a5f170..fc74eb3 100644 --- a/tests/isolation_provider/test_container.py +++ b/tests/isolation_provider/test_container.py @@ -6,9 +6,10 @@ from pytest_mock import MockerFixture from pytest_subprocess import FakeProcess from dangerzone import errors -from dangerzone.container_utils import Runtime +from dangerzone.container_utils import CONTAINER_NAME, Runtime from dangerzone.isolation_provider.container import Container from dangerzone.isolation_provider.qubes import is_qubes_native_conversion +from dangerzone.updater import SignatureError, UpdaterError from dangerzone.util import get_resource_path from .base import IsolationProviderTermination, IsolationProviderTest @@ -57,8 +58,13 @@ class TestContainer(IsolationProviderTest): ) provider.is_available() - def test_install_raise_if_image_cant_be_installed( - self, provider: Container, fp: FakeProcess, runtime_path: str + def test_install_raise_if_local_image_cant_be_installed( + self, + provider: Container, + fp: FakeProcess, + runtime_path: str, + skip_image_verification, + mocker: MockerFixture, ) -> None: """When an image installation fails, an exception should be raised""" @@ -74,60 +80,85 @@ class TestContainer(IsolationProviderTest): "list", "--format", "{{ .Tag }}", - "dangerzone.rocks/dangerzone", + CONTAINER_NAME, ], occurrences=2, ) - - fp.register_subprocess( - [ - runtime_path, - "load", - "-i", - get_resource_path("container.tar").absolute(), - ], - returncode=-1, + mocker.patch( + "dangerzone.isolation_provider.container.install_local_container_tar", + side_effect=UpdaterError, ) - with pytest.raises(errors.ImageInstallationException): - provider.install() + with pytest.raises(UpdaterError): + provider.install(should_upgrade=False) - def test_install_raises_if_still_not_installed( - self, provider: Container, fp: FakeProcess, runtime_path: str + def test_install_raise_if_local_image_cant_be_verified( + self, + provider: Container, + runtime_path: str, + skip_image_verification, + mocker: MockerFixture, ) -> None: - """When an image keep being not installed, it should return False""" - fp.register_subprocess( - [runtime_path, "version", "-f", "{{.Client.Version}}"], - stdout="4.0.0", + """In case an image has been installed but its signature cannot be verified, an exception should be raised""" + + mocker.patch( + "dangerzone.isolation_provider.container.container_utils.list_image_tags", + return_value=["a-tag"], + ) + mocker.patch( + "dangerzone.isolation_provider.container.verify_local_image", + side_effect=SignatureError, ) - 
fp.register_subprocess( [runtime_path, "image", "ls"], + with pytest.raises(SignatureError): + provider.install(should_upgrade=False) + + def test_install_works_if_verification_succeeds_on_second_try( + self, + provider: Container, + runtime_path: str, + skip_image_verification, + mocker: MockerFixture, + ) -> None: + """In case the image verification fails at first but succeeds after reinstalling the local image, install() should not raise""" + + mocker.patch( + "dangerzone.isolation_provider.container.container_utils.list_image_tags", + return_value=["a-tag"], + ) + mocker.patch( + "dangerzone.isolation_provider.container.verify_local_image", + side_effect=[SignatureError, True], ) - # First check should return nothing. - fp.register_subprocess( - [ - runtime_path, - "image", - "list", - "--format", - "{{ .Tag }}", - "dangerzone.rocks/dangerzone", - ], - occurrences=2, + provider.install(should_upgrade=False) + + def test_install_upgrades_if_available( + self, + provider: Container, + runtime_path: str, + skip_image_verification, + mocker: MockerFixture, + ) -> None: + """When upgrades are enabled and a new image is available, install() should upgrade the container image""" + + mocker.patch( + "dangerzone.isolation_provider.container.container_utils.list_image_tags", + return_value=["a-tag"], + ) + mocker.patch( + "dangerzone.isolation_provider.container.is_update_available", + return_value=(True, "digest"), + ) + upgrade = mocker.patch( + "dangerzone.isolation_provider.container.upgrade_container_image", + ) + mocker.patch( + "dangerzone.isolation_provider.container.verify_local_image", ) - fp.register_subprocess( - [ - runtime_path, - "load", - "-i", - get_resource_path("container.tar").absolute(), - ], - ) - with pytest.raises(errors.ImageNotPresentException): - provider.install() + provider.install(should_upgrade=True) + upgrade.assert_called() @pytest.mark.skipif( platform.system() not in ("Windows", "Darwin"), From a5636b5e74f3307ab9fd0cd49cf6b6984a79a5eb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Thu, 17 Apr 2025 17:19:04 +0200 Subject: [PATCH 16/24] dangerzone.updater exposes a few functions, constants and exceptions This is done so that callers do not have to look at the internal logic of `dangerzone.updater`: only the features that are actually part of the public API are exposed, and they can be used without deep knowledge of the updater's internals. --- dangerzone/isolation_provider/container.py | 15 ++++++++++++--- dangerzone/updater/__init__.py | 9 +++++++++ 2 files changed, 21 insertions(+), 3 deletions(-) diff --git a/dangerzone/isolation_provider/container.py b/dangerzone/isolation_provider/container.py index f6f8e03..317315b 100644 --- a/dangerzone/isolation_provider/container.py +++ b/dangerzone/isolation_provider/container.py @@ -3,11 +3,20 @@ import os import platform import shlex import subprocess -from typing import Callable, List, Tuple +import sys +from typing import Callable, List, Optional, Tuple -from .. import container_utils, errors, updater -from ..container_utils import Runtime +from ..
import container_utils, errors +from ..container_utils import CONTAINER_NAME, Runtime from ..document import Document +from ..updater import ( + DEFAULT_PUBKEY_LOCATION, + UpdaterError, + install_local_container_tar, + is_update_available, + upgrade_container_image, + verify_local_image, +) from ..util import get_resource_path, get_subprocess_startupinfo from .base import IsolationProvider, terminate_process_group diff --git a/dangerzone/updater/__init__.py b/dangerzone/updater/__init__.py index 3988bf1..929ea45 100644 --- a/dangerzone/updater/__init__.py +++ b/dangerzone/updater/__init__.py @@ -1,3 +1,12 @@ import logging log = logging.getLogger(__name__) + +from .errors import SignatureError, UpdaterError +from .signatures import ( + DEFAULT_PUBKEY_LOCATION, + install_local_container_tar, + is_update_available, + upgrade_container_image, + verify_local_image, +) From 1079f1335b8b02a8f17f2f81aa7d95aa1c232258 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Thu, 17 Apr 2025 17:23:31 +0200 Subject: [PATCH 17/24] Provide a simple function to install the shipped tarball. It lives in `dangerzone.updater.install_local_container_tar()` --- dangerzone/updater/signatures.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/dangerzone/updater/signatures.py b/dangerzone/updater/signatures.py index 5670443..a8f7747 100644 --- a/dangerzone/updater/signatures.py +++ b/dangerzone/updater/signatures.py @@ -499,3 +499,11 @@ def upgrade_container_image( # Store the signatures only now, to avoid storing them unverified store_signatures(signatures, manifest_digest, pubkey) return manifest_digest + + +def install_local_container_tar( + pubkey: Optional[str] = DEFAULT_PUBKEY_LOCATION, +) -> None: + tarball_path = get_resource_path("container.tar") + log.debug("Installing container image %s", tarball_path) + upgrade_container_image_airgapped(tarball_path, pubkey) From 4cedf5bf86b2a6c426ce8608d9939000edebd96a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Thu, 17 Apr 2025 17:26:11 +0200 Subject: [PATCH 18/24] Skip container signature verification during the tests This is not required, and skipping these checks makes the whole test suite run faster.
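As the diff below shows, the skip is implemented as a plain pytest fixture that monkeypatches the verification function to a no-op; a test opts in simply by requesting the fixture by name. A hypothetical consumer, for illustration only:

```python
# Hypothetical test consuming the fixture added in the diff below: requesting
# `skip_image_verification` makes verify_local_image() a no-op for this test.
def test_provider_without_signature_checks(skip_image_verification: None) -> None:
    provider = Container()
    assert provider.install(should_upgrade=False)
```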
--- dangerzone/updater/signatures.py | 2 +- tests/conftest.py | 8 ++++++++ tests/isolation_provider/test_container.py | 2 +- tests/test_cli.py | 7 ++++++- 4 files changed, 16 insertions(+), 3 deletions(-) diff --git a/dangerzone/updater/signatures.py b/dangerzone/updater/signatures.py index a8f7747..e4cf5d7 100644 --- a/dangerzone/updater/signatures.py +++ b/dangerzone/updater/signatures.py @@ -424,7 +424,7 @@ def store_signatures( write_log_index(get_log_index_from_signatures(signatures)) -def verify_local_image(image: str, pubkey: str) -> bool: +def verify_local_image(image: str, pubkey: str = DEFAULT_PUBKEY_LOCATION) -> bool: """ Verifies that a local image has a valid signature """ diff --git a/tests/conftest.py b/tests/conftest.py index 1b8a9ca..f413d11 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -119,6 +119,14 @@ def sample_pdf() -> str: return str(test_docs_dir.joinpath(BASIC_SAMPLE_PDF)) +@pytest.fixture +def skip_image_verification(monkeypatch): + def noop(*args, **kwargs): + return True + + monkeypatch.setattr(container, "verify_local_image", noop) + + SAMPLE_DIRECTORY = "test_docs" BASIC_SAMPLE_PDF = "sample-pdf.pdf" BASIC_SAMPLE_DOC = "sample-doc.doc" diff --git a/tests/isolation_provider/test_container.py b/tests/isolation_provider/test_container.py index fc74eb3..81f147d 100644 --- a/tests/isolation_provider/test_container.py +++ b/tests/isolation_provider/test_container.py @@ -22,7 +22,7 @@ elif os.environ.get("DUMMY_CONVERSION", False): @pytest.fixture -def provider() -> Container: +def provider(skip_image_verification: None) -> Container: return Container() diff --git a/tests/test_cli.py b/tests/test_cli.py index dbaa880..ffb6022 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -202,7 +202,12 @@ class TestCliConversion(TestCliBasic): result.assert_success() @for_each_doc - def test_formats(self, doc: Path, tmp_path_factory: pytest.TempPathFactory) -> None: + def test_formats( + self, + doc: Path, + tmp_path_factory: pytest.TempPathFactory, + skip_image_verification: pytest.FixtureRequest, + ) -> None: reference = (doc.parent / "reference" / doc.stem).with_suffix(".pdf") destination = tmp_path_factory.mktemp(doc.stem).with_suffix(".pdf") From 4c9139201fabffdc24752cfca659e351dbe3f175 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Thu, 17 Apr 2025 17:57:35 +0200 Subject: [PATCH 19/24] Remove duplicated python3 dependency from Dockerfile --- Dockerfile | 2 +- Dockerfile.in | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index a749560..70435ab 100644 --- a/Dockerfile +++ b/Dockerfile @@ -35,7 +35,7 @@ RUN \ apt-get update && \ apt-get install -y --no-install-recommends \ python3 python3-fitz libreoffice-nogui libreoffice-java-common \ - python3 python3-magic default-jre-headless fonts-noto-cjk fonts-dejavu \ + python3-magic default-jre-headless fonts-noto-cjk fonts-dejavu \ runsc unzip wget && \ : "Clean up for improving reproducibility (optional)" && \ rm -rf /var/cache/fontconfig/ && \ diff --git a/Dockerfile.in b/Dockerfile.in index 14f899d..148c6f7 100644 --- a/Dockerfile.in +++ b/Dockerfile.in @@ -35,7 +35,7 @@ RUN \ apt-get update && \ apt-get install -y --no-install-recommends \ python3 python3-fitz libreoffice-nogui libreoffice-java-common \ - python3 python3-magic default-jre-headless fonts-noto-cjk fonts-dejavu \ + python3-magic default-jre-headless fonts-noto-cjk fonts-dejavu \ runsc unzip wget && \ : "Clean up for improving reproducibility (optional)" && \ rm -rf 
/var/cache/fontconfig/ && \ From 06cbb13269ed80f771c73adac23dde00e26cf44e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Tue, 22 Apr 2025 12:51:01 +0200 Subject: [PATCH 20/24] Use a specific error if no signatures files are found --- dangerzone/updater/signatures.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/dangerzone/updater/signatures.py b/dangerzone/updater/signatures.py index e4cf5d7..b97a550 100644 --- a/dangerzone/updater/signatures.py +++ b/dangerzone/updater/signatures.py @@ -358,12 +358,21 @@ def load_and_verify_signatures( pubkey_signatures = signatures_path / get_file_digest(pubkey) if not pubkey_signatures.exists(): msg = ( - f"Cannot find a '{pubkey_signatures}' folder." + f"Cannot find a '{pubkey_signatures}' folder. " "You might need to download the image signatures first." ) raise errors.SignaturesFolderDoesNotExist(msg) - with open(pubkey_signatures / f"{image_digest}.json") as f: + signatures_file = pubkey_signatures / f"{image_digest}.json" + + if not signatures_file.exists(): + msg = ( + f"Cannot find a '{signatures_file}' file. " + "You might need to download the image signatures first." + ) + raise errors.LocalSignatureNotFound(msg) + + with open(signatures_file) as f: log.debug("Loading signatures from %s", f.name) signatures = json.load(f) From 66b906a8ee6f27894da1acb7a22bf539b9346564 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Tue, 22 Apr 2025 12:52:03 +0200 Subject: [PATCH 21/24] Fix runtime error in repro build Reference Docker rather than Podman in the error, otherwise it can be misleading. --- dev_scripts/repro-build.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev_scripts/repro-build.py b/dev_scripts/repro-build.py index 48bb835..738020c 100755 --- a/dev_scripts/repro-build.py +++ b/dev_scripts/repro-build.py @@ -156,7 +156,7 @@ def parse_buildkit_args(args, runtime: str) -> str: return [] if runtime != "podman": - raise RuntimeError("Cannot specify BuildKit arguments using the Podman runtime") + raise RuntimeError("Cannot specify BuildKit arguments using the Docker runtime") return shlex.split(args.buildkit_args) From dce91eaa260174fd324a2f99cd2a4e5e4c3ebae0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Tue, 22 Apr 2025 12:54:12 +0200 Subject: [PATCH 22/24] Update the image location to track `ghcr.io/freedomofpress` --- dangerzone/container_utils.py | 2 +- install/common/build-image.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/dangerzone/container_utils.py b/dangerzone/container_utils.py index f0735d0..9cb79e0 100644 --- a/dangerzone/container_utils.py +++ b/dangerzone/container_utils.py @@ -11,7 +11,7 @@ from .settings import Settings from .util import get_resource_path, get_subprocess_startupinfo OLD_CONTAINER_NAME = "dangerzone.rocks/dangerzone" -CONTAINER_NAME = "ghcr.io/almet/dangerzone/dangerzone" # FIXME: Change this to the correct container name +CONTAINER_NAME = "ghcr.io/freedomofpress/dangerzone/dangerzone" log = logging.getLogger(__name__) diff --git a/install/common/build-image.py b/install/common/build-image.py index 868c5b1..87d5615 100644 --- a/install/common/build-image.py +++ b/install/common/build-image.py @@ -6,7 +6,7 @@ import sys from pathlib import Path BUILD_CONTEXT = "dangerzone" -IMAGE_NAME = "dangerzone.rocks/dangerzone" +IMAGE_NAME = "ghcr.io/freedomofpress/dangerzone/dangerzone" if platform.system() in ["Darwin", "Windows"]: CONTAINER_RUNTIME = "docker" elif 
platform.system() == "Linux": From 59d3bba835b6171a0e7516cdeccbc57a2309f9d3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Tue, 22 Apr 2025 17:45:53 +0200 Subject: [PATCH 23/24] CI: Add an option to attach container signatures to the registry The `build-push-image.yml` reusable workflow can generate keypairs and sign the container images with them. This is only used by the CI, to test that a valid signature is actually detected as such. --- .github/workflows/build-push-image.yml | 56 ++++++++++++++++++- .github/workflows/ci.yml | 52 +++++------------ .github/workflows/release-container-image.yml | 1 + 3 files changed, 70 insertions(+), 39 deletions(-) diff --git a/.github/workflows/build-push-image.yml b/.github/workflows/build-push-image.yml index e6a7892..af48634 100644 --- a/.github/workflows/build-push-image.yml +++ b/.github/workflows/build-push-image.yml @@ -15,11 +15,21 @@ on: reproduce: required: true type: boolean + sign: + required: true + type: boolean + key_name: + required: false + type: string + default: "dangerzone-tests" + key_cache: + required: false + type: string + default: "v1-keypair-${{ github.ref_name }}" # unique for the branch / PR secrets: registry_token: required: true - jobs: lint: runs-on: ubuntu-latest @@ -44,6 +54,7 @@ jobs: debian_archive_date: ${{ steps.params.outputs.debian_archive_date }} source_date_epoch: ${{ steps.params.outputs.source_date_epoch }} image: ${{ steps.params.outputs.full_image_name }} + tag: ${{ steps.params.outputs.tag }} steps: - uses: actions/checkout@v4 with: @@ -60,7 +71,7 @@ jobs: echo "debian_archive_date=${DEBIAN_ARCHIVE_DATE}" >> $GITHUB_OUTPUT echo "source_date_epoch=${SOURCE_DATE_EPOCH}" >> $GITHUB_OUTPUT - echo "tag=${DEBIAN_ARCHIVE_DATE}-${TAG}" >> $GITHUB_OUTPUT + echo "tag=${TAG}" >> $GITHUB_OUTPUT echo "full_image_name=${FULL_IMAGE_NAME}" >> $GITHUB_OUTPUT echo "buildkit_image=${BUILDKIT_IMAGE}" >> $GITHUB_OUTPUT @@ -73,6 +84,7 @@ jobs: debian_archive_date: ${{ needs.prepare.outputs.debian_archive_date }} source_date_epoch: ${{ needs.prepare.outputs.source_date_epoch }} image: ${{ needs.prepare.outputs.image }} + tag: ${{ needs.prepare.outputs.tag }} strategy: fail-fast: false matrix: @@ -140,6 +152,7 @@ jobs: debian_archive_date: ${{ needs.build.outputs.debian_archive_date }} source_date_epoch: ${{ needs.build.outputs.source_date_epoch }} image: ${{ needs.build.outputs.image }} + tag: ${{ needs.build.outputs.tag }} digest_root: ${{ steps.image.outputs.digest_root }} digest_amd64: ${{ steps.image.outputs.digest_amd64 }} digest_arm64: ${{ steps.image.outputs.digest_arm64 }} @@ -246,3 +259,42 @@ jobs: --platform \ linux/${{ matrix.platform.name }} \ ${{ needs.merge.outputs[format('digest_{0}', matrix.platform.name)] }} + + sign: + if: ${{ inputs.sign }} + runs-on: "ubuntu-latest" + env: + COSIGN_PASSWORD: "password" + COSIGN_YES: true + needs: + - merge + # outputs: add signature location ? 
+ steps: + - name: Install Cosign + uses: sigstore/cosign-installer@d7d6bc7722e3daa8354c50bcb52f4837da5e9b6a + with: + cosign-release: 'v2.5.0' + - name: Check install + run: cosign version + - name: Generate keypair + run: |- + cosign generate-key-pair --output-key-prefix="${{ inputs.key_name }}" + - name: Cache keypair + uses: actions/cache@v4 + with: + path: "${{ inputs.key_name }}.*" + key: ${{ inputs.key_cache }} + enableCrossOsArchive: true + + - name: Login to GHCR + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ inputs.registry_user }} + password: ${{ secrets.registry_token }} + + - name: Sign container + run: |- + export IMAGE_URI="${{ inputs.registry }}/${{ inputs.image_name }}:${{ needs.merge.outputs.tag }}@${{ needs.merge.outputs.digest_root }}" + cosign sign -d --yes --key=${{ inputs.key_name }}.key "$IMAGE_URI" + shell: bash diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ee7bd73..a862710 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -11,11 +11,10 @@ on: permissions: packages: write + actions: read # for detecting the Github Actions environment. + id-token: write # for creating OIDC tokens for signing. env: - REGISTRY_USER: ${{ github.actor }} - REGISTRY_PASSWORD: ${{ github.token }} - IMAGE_REGISTRY: ghcr.io/${{ github.repository_owner }} QT_SELECT: "qt6" # Disable multiple concurrent runs on the same branch @@ -45,35 +44,18 @@ jobs: # This is already built daily by the "build.yml" file # But we also want to include this in the checks that run on each push. build-container-image: - runs-on: ubuntu-24.04 - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - - name: Get current date - id: date - run: echo "date=$(date +'%Y-%m-%d')" >> $GITHUB_OUTPUT - - - name: Cache container image - id: cache-container-image - uses: actions/cache@v4 - with: - key: v5-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }} - path: |- - share/container.tar - share/image-id.txt - - - name: Build Dangerzone container image - if: ${{ steps.cache-container-image.outputs.cache-hit != 'true' }} - run: | - python3 ./install/common/build-image.py - - - name: Upload container image - uses: actions/upload-artifact@v4 - with: - name: container.tar - path: share/container.tar + name: Build, push and sign container image + uses: ./.github/workflows/build-push-image.yml + with: + registry: "ghcr.io/${{ github.repository_owner }}" + registry_user: ${{ github.actor }} + image_name: "dangerzone/dangerzone-staging" + reproduce: false + sign: true + key_name: "dangerzone-tests" + key_cache: "v1-test-keypair-${{ github.ref_name }}" + secrets: + registry_token: ${{ secrets.GITHUB_TOKEN }} download-tessdata: name: Download and cache Tesseract data @@ -227,9 +209,7 @@ jobs: uses: actions/cache/restore@v4 with: key: v5-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }} - path: |- - share/container.tar - share/image-id.txt + path: share/container.tar fail-on-cache-miss: true - name: Build Dangerzone .deb @@ -336,7 +316,6 @@ jobs: key: v5-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }} path: |- share/container.tar - share/image-id.txt fail-on-cache-miss: true - name: Build Dangerzone .rpm @@ -433,7 +412,6 @@ jobs: 
key: v5-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }} path: |- share/container.tar - share/image-id.txt fail-on-cache-miss: true - name: Restore cached tessdata diff --git a/.github/workflows/release-container-image.yml b/.github/workflows/release-container-image.yml index da63204..98ff66e 100644 --- a/.github/workflows/release-container-image.yml +++ b/.github/workflows/release-container-image.yml @@ -18,5 +18,6 @@ jobs: registry_user: ${{ github.actor }} image_name: dangerzone/dangerzone reproduce: true + sign: false secrets: registry_token: ${{ secrets.GITHUB_TOKEN }} From b78f30527c9c80f4bc1b2164137709bbe2b22015 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Fri, 25 Apr 2025 17:23:06 +0200 Subject: [PATCH 24/24] Add image_uri output in the build-push-image workflow And use it when getting the container image to build `.rpm` and `.deb` packages. --- .github/workflows/build-push-image.yml | 9 +++++++-- .github/workflows/ci.yml | 15 ++++++++++----- 2 files changed, 17 insertions(+), 7 deletions(-) diff --git a/.github/workflows/build-push-image.yml b/.github/workflows/build-push-image.yml index af48634..030f69d 100644 --- a/.github/workflows/build-push-image.yml +++ b/.github/workflows/build-push-image.yml @@ -29,6 +29,10 @@ on: secrets: registry_token: required: true + outputs: + image_uri: + description: "The published container image location, with the tag and checksum" + value: ${{ jobs.merge.outputs.image_uri }} jobs: lint: @@ -152,6 +156,7 @@ jobs: debian_archive_date: ${{ needs.build.outputs.debian_archive_date }} source_date_epoch: ${{ needs.build.outputs.source_date_epoch }} image: ${{ needs.build.outputs.image }} + image_uri: ${{ inputs.registry }}/${{ inputs.image_name }}:${{ needs.build.outputs.tag }}@${{ steps.image.outputs.digest_root }} tag: ${{ needs.build.outputs.tag }} digest_root: ${{ steps.image.outputs.digest_root }} digest_amd64: ${{ steps.image.outputs.digest_amd64 }} @@ -295,6 +300,6 @@ jobs: - name: Sign container run: |- - export IMAGE_URI="${{ inputs.registry }}/${{ inputs.image_name }}:${{ needs.merge.outputs.tag }}@${{ needs.merge.outputs.digest_root }}" - cosign sign -d --yes --key=${{ inputs.key_name }}.key "$IMAGE_URI" + export IMAGE_URI="${{ needs.merge.outputs.image_uri }}" + cosign sign --yes --key=${{ inputs.key_name }}.key "$IMAGE_URI" shell: bash diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a862710..d2f8eac 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -205,13 +205,18 @@ jobs: id: date run: echo "date=$(date +'%Y-%m-%d')" >> $GITHUB_OUTPUT - - name: Restore container cache - uses: actions/cache/restore@v4 with: - key: v5-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }} - path: share/container.tar - fail-on-cache-miss: true + - name: Install Cosign + uses: sigstore/cosign-installer@d7d6bc7722e3daa8354c50bcb52f4837da5e9b6a + with: + cosign-release: 'v2.5.0' + - name: Get the container image from the registry + run: |- + cosign save ${{ needs.build-container-image.outputs.image_uri }} --dir tmp + cd tmp + tar -cvf ../share/container.tar . + cd .. + - name: Build Dangerzone .deb run: | ./dev_scripts/env.py --distro ${{ matrix.distro }} \
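As a closing note, the signatures produced by the CI sign job can be checked locally against the throwaway test key. A hedged sketch (assuming `cosign` is installed, the `dangerzone-tests.pub` half of the keypair generated by the workflow is at hand, and a real `image_uri` output value is substituted for the placeholder):

```python
# Hypothetical local verification of a CI-signed image via the cosign CLI.
import subprocess

image_uri = "ghcr.io/OWNER/dangerzone/dangerzone-staging:TAG@sha256:DIGEST"  # placeholder
subprocess.run(
    ["cosign", "verify", "--key", "dangerzone-tests.pub", image_uri],
    check=True,
)
```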