From 81ee267591910b0ccd1af0a593864a4dcc1f2803 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Tue, 11 Feb 2025 18:13:39 +0100 Subject: [PATCH 01/31] Add a `dangerzone-image` CLI script It contains utilities to interact with OCI registries, like getting the list of published tags and getting the content of a manifest. It does so via the use of the Docker Registry API v2 [0]. The script has been added to the `dev_scripts`, and is also installed on the system under `dangerzone-image`. [0] https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry --- dangerzone/updater/__init__.py | 3 + dangerzone/updater/cli.py | 42 ++++++++++++ dangerzone/updater/errors.py | 10 +++ dangerzone/updater/registry.py | 118 +++++++++++++++++++++++++++++++++ dev_scripts/dangerzone-image | 13 ++++ pyproject.toml | 1 + 6 files changed, 187 insertions(+) create mode 100644 dangerzone/updater/__init__.py create mode 100644 dangerzone/updater/cli.py create mode 100644 dangerzone/updater/errors.py create mode 100644 dangerzone/updater/registry.py create mode 100755 dev_scripts/dangerzone-image diff --git a/dangerzone/updater/__init__.py b/dangerzone/updater/__init__.py new file mode 100644 index 0000000..3988bf1 --- /dev/null +++ b/dangerzone/updater/__init__.py @@ -0,0 +1,3 @@ +import logging + +log = logging.getLogger(__name__) diff --git a/dangerzone/updater/cli.py b/dangerzone/updater/cli.py new file mode 100644 index 0000000..1c9f85b --- /dev/null +++ b/dangerzone/updater/cli.py @@ -0,0 +1,42 @@ +#!/usr/bin/python + +import logging + +import click + +from . 
import attestations, errors, log, registry, signatures + +DEFAULT_REPOSITORY = "freedomofpress/dangerzone" +DEFAULT_BRANCH = "main" +DEFAULT_IMAGE_NAME = "ghcr.io/freedomofpress/dangerzone/dangerzone" + + +@click.group() +@click.option("--debug", is_flag=True) +def main(debug: bool) -> None: + if debug: + click.echo("Debug mode enabled") + level = logging.DEBUG + else: + level = logging.INFO + logging.basicConfig(level=level) + + +@main.command() +@click.argument("image") +def list_remote_tags(image: str) -> None: + """List the tags available for a given image.""" + click.echo(f"Existing tags for {image}") + for tag in registry.list_tags(image): + click.echo(tag) + + +@main.command() +@click.argument("image") +def get_manifest(image: str) -> None: + """Retrieves a remote manifest for a given image and displays it.""" + click.echo(registry.get_manifest(image).content) + + +if __name__ == "__main__": + main() diff --git a/dangerzone/updater/errors.py b/dangerzone/updater/errors.py new file mode 100644 index 0000000..1587e73 --- /dev/null +++ b/dangerzone/updater/errors.py @@ -0,0 +1,10 @@ +class UpdaterError(Exception): + pass + + +class ImageNotFound(UpdaterError): + pass + + +class RegistryError(UpdaterError): + pass diff --git a/dangerzone/updater/registry.py b/dangerzone/updater/registry.py new file mode 100644 index 0000000..a5dd1db --- /dev/null +++ b/dangerzone/updater/registry.py @@ -0,0 +1,118 @@ +import re +from collections import namedtuple +from hashlib import sha256 +from typing import Dict, Optional, Tuple + +import requests + +from . 
import errors, log + +__all__ = [ + "get_manifest_digest", + "list_tags", + "get_manifest", + "parse_image_location", +] + +SIGSTORE_BUNDLE = "application/vnd.dev.sigstore.bundle.v0.3+json" +IMAGE_INDEX_MEDIA_TYPE = "application/vnd.oci.image.index.v1+json" +ACCEPT_MANIFESTS_HEADER = ",".join( + [ + "application/vnd.docker.distribution.manifest.v1+json", + "application/vnd.docker.distribution.manifest.v1+prettyjws", + "application/vnd.docker.distribution.manifest.v2+json", + "application/vnd.oci.image.manifest.v1+json", + "application/vnd.docker.distribution.manifest.list.v2+json", + IMAGE_INDEX_MEDIA_TYPE, + ] +) + + +Image = namedtuple("Image", ["registry", "namespace", "image_name", "tag"]) + + +def parse_image_location(input_string: str) -> Image: + """Parses container image location into an Image namedtuple""" + pattern = ( + r"^" + r"(?P[a-zA-Z0-9.-]+)/" + r"(?P[a-zA-Z0-9-]+)/" + r"(?P[^:]+)" + r"(?::(?P[a-zA-Z0-9.-]+))?" + r"$" + ) + match = re.match(pattern, input_string) + if not match: + raise ValueError("Malformed image location") + return Image( + registry=match.group("registry"), + namespace=match.group("namespace"), + image_name=match.group("image_name"), + tag=match.group("tag") or "latest", + ) + + +def _get_auth_header(image) -> Dict[str, str]: + auth_url = f"https://{image.registry}/token" + response = requests.get( + auth_url, + params={ + "service": f"{image.registry}", + "scope": f"repository:{image.namespace}/{image.image_name}:pull", + }, + ) + response.raise_for_status() + token = response.json()["token"] + return {"Authorization": f"Bearer {token}"} + + +def _url(image): + return f"https://{image.registry}/v2/{image.namespace}/{image.image_name}" + + +def list_tags(image_str: str) -> list: + image = parse_image_location(image_str) + url = f"{_url(image)}/tags/list" + response = requests.get(url, headers=_get_auth_header(image)) + response.raise_for_status() + tags = response.json().get("tags", []) + return tags + + +def 
get_manifest(image_str) -> requests.Response: + """Get manifest information for a specific tag""" + image = parse_image_location(image_str) + manifest_url = f"{_url(image)}/manifests/{image.tag}" + headers = { + "Accept": ACCEPT_MANIFESTS_HEADER, + } + headers.update(_get_auth_header(image)) + + response = requests.get(manifest_url, headers=headers) + response.raise_for_status() + return response + + +def list_manifests(image_str) -> list: + return get_manifest(image_str).json().get("manifests") + + +def get_blob(image, digest: str) -> requests.Response: + response = requests.get( + f"{_url(image)}/blobs/{digest}", + headers={ + "Authorization": f"Bearer {_get_auth_token(image)}", + }, + ) + response.raise_for_status() + return response + + +def get_manifest_digest( + image_str: str, tag_manifest_content: Optional[bytes] = None +) -> str: + image = parse_image_location(image_str) + if not tag_manifest_content: + tag_manifest_content = get_manifest(image).content + + return sha256(tag_manifest_content).hexdigest() diff --git a/dev_scripts/dangerzone-image b/dev_scripts/dangerzone-image new file mode 100755 index 0000000..5467207 --- /dev/null +++ b/dev_scripts/dangerzone-image @@ -0,0 +1,13 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +import os +import sys + +# Load dangerzone module and resources from the source code tree +sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) +sys.dangerzone_dev = True + +from dangerzone.updater import cli + +cli.main() diff --git a/pyproject.toml b/pyproject.toml index 4bb4bb4..58093a4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,6 +34,7 @@ shiboken6 = [ [tool.poetry.scripts] dangerzone = 'dangerzone:main' dangerzone-cli = 'dangerzone:main' +dangerzone-image = "dangerzone.updater.cli:main" # Dependencies required for packaging the code on various platforms. 
[tool.poetry.group.package.dependencies]

From 3d28ae2eee158551d34e86d7751633924dd50776 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Alexis=20M=C3=A9taireau?=
Date: Tue, 11 Feb 2025 18:22:43 +0100
Subject: [PATCH 02/31] Download and verify cosign signatures

Signatures are stored in the OCI Manifest v2 registry [0], and are
expected to follow the Cosign Signature Specification [0]

The following CLI utilities are provided with `dangerzone-image`:

For checking new container images, upgrading them and downloading them:

- `upgrade` allows to upgrade the currently installed image to the last
  one available on the OCI registry, downloading and storing the
  signatures in the process.
- `verify-local` allows to verify the currently installed image against
  downloaded signatures and public key.

To prepare and install archives on air-gapped environments:

- `prepare-archive` helps to prepare an archive to install on another
  machine
- `load-archive` helps upgrade the local image to the one in the given
  archive.

Signatures are stored locally using the format provided by `cosign
download signature`, and the Rekor log index is used to ensure the
requested-to-install container image is fresher than the one already
present on the system.
[0] https://github.com/sigstore/cosign/blob/main/specs/SIGNATURE_SPEC.md --- dangerzone/container_utils.py | 88 ++++++- dangerzone/updater/cli.py | 63 +++++ dangerzone/updater/cosign.py | 32 +++ dangerzone/updater/errors.py | 44 ++++ dangerzone/updater/signatures.py | 433 +++++++++++++++++++++++++++++++ 5 files changed, 646 insertions(+), 14 deletions(-) create mode 100644 dangerzone/updater/cosign.py create mode 100644 dangerzone/updater/signatures.py diff --git a/dangerzone/container_utils.py b/dangerzone/container_utils.py index 99c9a08..863a871 100644 --- a/dangerzone/container_utils.py +++ b/dangerzone/container_utils.py @@ -3,23 +3,22 @@ import logging import platform import shutil import subprocess -from typing import List, Tuple +from typing import List, Optional, Tuple from . import errors from .util import get_resource_path, get_subprocess_startupinfo -CONTAINER_NAME = "dangerzone.rocks/dangerzone" +OLD_CONTAINER_NAME = "dangerzone.rocks/dangerzone" +CONTAINER_NAME = "ghcr.io/freedomofpress/dangerzone/dangerzone" log = logging.getLogger(__name__) def get_runtime_name() -> str: if platform.system() == "Linux": - runtime_name = "podman" - else: - # Windows, Darwin, and unknown use docker for now, dangerzone-vm eventually - runtime_name = "docker" - return runtime_name + return "podman" + # Windows, Darwin, and unknown use docker for now, dangerzone-vm eventually + return "docker" def get_runtime_version() -> Tuple[int, int]: @@ -112,13 +111,7 @@ def delete_image_tag(tag: str) -> None: ) -def get_expected_tag() -> str: - """Get the tag of the Dangerzone image tarball from the image-id.txt file.""" - with open(get_resource_path("image-id.txt")) as f: - return f.read().strip() - - -def load_image_tarball() -> None: +def load_image_tarball_from_gzip() -> None: log.info("Installing Dangerzone container image...") p = subprocess.Popen( [get_runtime(), "load"], @@ -147,3 +140,70 @@ def load_image_tarball() -> None: ) log.info("Successfully installed container 
image from") + + +def load_image_tarball_from_tar(tarball_path: str) -> None: + cmd = [get_runtime(), "load", "-i", tarball_path] + subprocess.run(cmd, startupinfo=get_subprocess_startupinfo(), check=True) + + log.info("Successfully installed container image from %s", tarball_path) + + +def tag_image_by_digest(digest: str, tag: str) -> None: + """Tag a container image by digest. + The sha256: prefix should be omitted from the digest. + """ + image_id = get_image_id_by_digest(digest) + cmd = [get_runtime(), "tag", image_id, tag] + log.debug(" ".join(cmd)) + subprocess.run(cmd, startupinfo=get_subprocess_startupinfo(), check=True) + + +def get_image_id_by_digest(digest: str) -> str: + """Get an image ID from a digest. + The sha256: prefix should be omitted from the digest. + """ + cmd = [ + get_runtime(), + "images", + "-f", + f"digest=sha256:{digest}", + "--format", + "{{.Id}}", + ] + log.debug(" ".join(cmd)) + process = subprocess.run( + cmd, startupinfo=get_subprocess_startupinfo(), check=True, capture_output=True + ) + # In case we have multiple lines, we only want the first one. 
+ return process.stdout.decode().strip().split("\n")[0] + + +def container_pull(image: str) -> bool: + """Pull a container image from a registry.""" + cmd = [get_runtime_name(), "pull", f"{image}"] + process = subprocess.Popen(cmd, stdout=subprocess.PIPE) + process.communicate() + return process.returncode == 0 + + +def get_local_image_digest(image: str) -> Optional[str]: + """ + Returns a image hash from a local image name + """ + # Get the image hash from the podman images command, as + # podman inspect returns a the digest of the architecture-bound image + cmd = [get_runtime_name(), "images", image, "--format", "{{.Digest}}"] + log.debug(" ".join(cmd)) + try: + result = subprocess.run(cmd, capture_output=True, check=True) + lines = result.stdout.decode().strip().split("\n") + if len(lines) != 1: + raise errors.MultipleImagesFoundException( + f"Expected a single line of output, got {len(lines)} lines" + ) + return lines[0].replace("sha256:", "") + except subprocess.CalledProcessError as e: + return None + else: + return result.stdout.strip().decode().strip("sha256:") diff --git a/dangerzone/updater/cli.py b/dangerzone/updater/cli.py index 1c9f85b..9eab01e 100644 --- a/dangerzone/updater/cli.py +++ b/dangerzone/updater/cli.py @@ -22,6 +22,69 @@ def main(debug: bool) -> None: logging.basicConfig(level=level) +@main.command() +@click.argument("image", default=DEFAULT_IMAGE_NAME) +@click.option("--pubkey", default=signatures.DEFAULT_PUBKEY_LOCATION) +def upgrade(image: str, pubkey: str) -> None: + """Upgrade the image to the latest signed version.""" + manifest_digest = registry.get_manifest_digest(image) + try: + is_upgraded = signatures.upgrade_container_image(image, manifest_digest, pubkey) + if is_upgraded: + click.echo(f"✅ The local image {image} has been upgraded") + click.echo(f"✅ The image has been signed with {pubkey}") + click.echo(f"✅ Signatures has been verified and stored locally") + + except errors.ImageAlreadyUpToDate as e: + click.echo(f"✅ {e}") + 
raise click.Abort() + + +@main.command() +@click.argument("image_filename") +@click.option("--pubkey", default=signatures.DEFAULT_PUBKEY_LOCATION) +def load_archive(image_filename: str, pubkey: str) -> None: + """Upgrade the local image to the one in the archive.""" + try: + loaded_image = signatures.upgrade_container_image_airgapped( + image_filename, pubkey + ) + click.echo( + f"✅ Installed image {image_filename} on the system as {loaded_image}" + ) + except errors.ImageAlreadyUpToDate as e: + click.echo(f"✅ {e}") + raise click.Abort() + + +@main.command() +@click.argument("image") +@click.option("--output", default="dangerzone-airgapped.tar") +def prepare_archive(image: str, output: str) -> None: + """Prepare an archive to upgrade the dangerzone image on an airgapped environment.""" + signatures.prepare_airgapped_archive(image, output) + click.echo(f"✅ Archive {output} created") + + +@main.command() +@click.argument("image", default=DEFAULT_IMAGE_NAME) +@click.option("--pubkey", default=signatures.DEFAULT_PUBKEY_LOCATION) +def verify_local(image: str, pubkey: str) -> None: + """ + Verify the local image signature against a public key and the stored signatures. + """ + # XXX remove a potentiel :tag + if signatures.verify_local_image(image, pubkey): + click.echo( + ( + f"Verifying the local image:\n\n" + f"pubkey: {pubkey}\n" + f"image: {image}\n\n" + f"✅ The local image {image} has been signed with {pubkey}" + ) + ) + + @main.command() @click.argument("image") def list_remote_tags(image: str) -> None: diff --git a/dangerzone/updater/cosign.py b/dangerzone/updater/cosign.py new file mode 100644 index 0000000..9abcc84 --- /dev/null +++ b/dangerzone/updater/cosign.py @@ -0,0 +1,32 @@ +import subprocess + +from . 
import errors, log + + +def ensure_installed() -> None: + try: + subprocess.run(["cosign", "version"], capture_output=True, check=True) + except subprocess.CalledProcessError: + raise errors.CosignNotInstalledError() + + +def verify_local_image(oci_image_folder: str, pubkey: str) -> bool: + """Verify the given path against the given public key""" + + ensure_installed() + cmd = [ + "cosign", + "verify", + "--key", + pubkey, + "--offline", + "--local-image", + oci_image_folder, + ] + log.debug(" ".join(cmd)) + result = subprocess.run(cmd, capture_output=True) + if result.returncode == 0: + log.info("Signature verified") + return True + log.info("Failed to verify signature", result.stderr) + return False diff --git a/dangerzone/updater/errors.py b/dangerzone/updater/errors.py index 1587e73..d302975 100644 --- a/dangerzone/updater/errors.py +++ b/dangerzone/updater/errors.py @@ -2,9 +2,53 @@ class UpdaterError(Exception): pass +class ImageAlreadyUpToDate(UpdaterError): + pass + + class ImageNotFound(UpdaterError): pass +class SignatureError(UpdaterError): + pass + + class RegistryError(UpdaterError): pass + + +class AirgappedImageDownloadError(UpdaterError): + pass + + +class NoRemoteSignatures(SignatureError): + pass + + +class SignatureVerificationError(SignatureError): + pass + + +class SignatureExtractionError(SignatureError): + pass + + +class SignaturesFolderDoesNotExist(SignatureError): + pass + + +class InvalidSignatures(SignatureError): + pass + + +class SignatureMismatch(SignatureError): + pass + + +class LocalSignatureNotFound(SignatureError): + pass + + +class CosignNotInstalledError(SignatureError): + pass diff --git a/dangerzone/updater/signatures.py b/dangerzone/updater/signatures.py new file mode 100644 index 0000000..e5f1189 --- /dev/null +++ b/dangerzone/updater/signatures.py @@ -0,0 +1,433 @@ +import json +import platform +import re +import subprocess +import tarfile +from base64 import b64decode, b64encode +from functools import reduce +from hashlib 
import sha256 +from io import BytesIO +from pathlib import Path +from tempfile import NamedTemporaryFile, TemporaryDirectory +from typing import Dict, List, Optional, Tuple + +from .. import container_utils as runtime +from ..util import get_resource_path +from . import cosign, errors, log, registry + +try: + import platformdirs +except ImportError: + import appdirs as platformdirs # type: ignore[no-redef] + + +def get_config_dir() -> Path: + return Path(platformdirs.user_config_dir("dangerzone")) + + +# XXX Store this somewhere else. +DEFAULT_PUBKEY_LOCATION = get_resource_path("freedomofpress-dangerzone-pub.key") +SIGNATURES_PATH = get_config_dir() / "signatures" +LAST_LOG_INDEX = SIGNATURES_PATH / "last_log_index" + +__all__ = [ + "verify_signature", + "load_signatures", + "store_signatures", + "verify_offline_image_signature", +] + + +def signature_to_bundle(sig: Dict) -> Dict: + """Convert a cosign-download signature to the format expected by cosign bundle.""" + bundle = sig["Bundle"] + payload = bundle["Payload"] + return { + "base64Signature": sig["Base64Signature"], + "Payload": sig["Payload"], + "cert": sig["Cert"], + "chain": sig["Chain"], + "rekorBundle": { + "SignedEntryTimestamp": bundle["SignedEntryTimestamp"], + "Payload": { + "body": payload["body"], + "integratedTime": payload["integratedTime"], + "logIndex": payload["logIndex"], + "logID": payload["logID"], + }, + }, + "RFC3161Timestamp": sig["RFC3161Timestamp"], + } + + +def verify_signature(signature: dict, image_digest: str, pubkey: str) -> bool: + """Verify a signature against a given public key""" + # XXX - Also verfy the identity/docker-reference field against the expected value + # e.g. 
ghcr.io/freedomofpress/dangerzone/dangerzone + + cosign.ensure_installed() + signature_bundle = signature_to_bundle(signature) + + payload_bytes = b64decode(signature_bundle["Payload"]) + payload_digest = json.loads(payload_bytes)["critical"]["image"][ + "docker-manifest-digest" + ] + if payload_digest != f"sha256:{image_digest}": + raise errors.SignatureMismatch( + f"The signature does not match the image digest ({payload_digest}, {image_digest})" + ) + + with ( + NamedTemporaryFile(mode="w") as signature_file, + NamedTemporaryFile(mode="bw") as payload_file, + ): + json.dump(signature_bundle, signature_file) + signature_file.flush() + + payload_file.write(payload_bytes) + payload_file.flush() + + cmd = [ + "cosign", + "verify-blob", + "--key", + pubkey, + "--bundle", + signature_file.name, + payload_file.name, + ] + log.debug(" ".join(cmd)) + result = subprocess.run(cmd, capture_output=True) + if result.returncode != 0: + # XXX Raise instead? + log.debug("Failed to verify signature", result.stderr) + return False + if result.stderr == b"Verified OK\n": + log.debug("Signature verified") + return True + return False + + +def is_update_available(image: str) -> (bool, Optional[str]): + remote_digest = registry.get_manifest_digest(image) + local_digest = runtime.get_local_image_digest(image) + log.debug("Remote digest: %s", remote_digest) + log.debug("Local digest: %s", local_digest) + has_update = remote_digest != local_digest + if has_update: + return True, remote_digest + return False, None + + +def verify_signatures( + signatures: List[Dict], + image_digest: str, + pubkey: str, +) -> bool: + for signature in signatures: + if not verify_signature(signature, image_digest, pubkey): + raise errors.SignatureVerificationError() + return True + + +def get_last_log_index() -> int: + SIGNATURES_PATH.mkdir(parents=True, exist_ok=True) + if not LAST_LOG_INDEX.exists(): + return 0 + + with open(LAST_LOG_INDEX) as f: + return int(f.read()) + + +def 
get_log_index_from_signatures(signatures: List[Dict]) -> int: + return reduce( + lambda acc, sig: max(acc, sig["Bundle"]["Payload"]["logIndex"]), signatures, 0 + ) + + +def write_log_index(log_index: int) -> None: + last_log_index_path = SIGNATURES_PATH / "last_log_index" + + with open(log_index, "w") as f: + f.write(str(log_index)) + + +def _get_blob(tmpdir: str, digest: str) -> Path: + return Path(tmpdir) / "blobs" / "sha256" / digest.replace("sha256:", "") + + +def upgrade_container_image_airgapped(container_tar: str, pubkey: str) -> str: + """ + Verify the given archive against its self-contained signatures, then + upgrade the image and retag it to the expected tag. + + Right now, the archive is extracted and reconstructed, requiring some space + on the filesystem. + + :return: The loaded image name + """ + + # XXX Use a memory buffer instead of the filesystem + with TemporaryDirectory() as tmpdir: + + def _get_signature_filename(manifests: List[Dict]) -> Path: + for manifest in manifests: + if ( + manifest["annotations"].get("kind") + == "dev.cosignproject.cosign/sigs" + ): + return _get_blob(tmpdir, manifest["digest"]) + raise errors.SignatureExtractionError() + + with tarfile.open(container_tar, "r") as archive: + archive.extractall(tmpdir) + + if not cosign.verify_local_image(tmpdir, pubkey): + raise errors.SignatureVerificationError() + + # Remove the signatures from the archive, otherwise podman is not able to load it + with open(Path(tmpdir) / "index.json") as f: + index_json = json.load(f) + + signature_filename = _get_signature_filename(index_json["manifests"]) + + index_json["manifests"] = [ + manifest + for manifest in index_json["manifests"] + if manifest["annotations"].get("kind") + in ("dev.cosignproject.cosign/imageIndex", "dev.cosignproject.cosign/image") + ] + + with open(signature_filename, "rb") as f: + image_name, signatures = convert_oci_images_signatures(json.load(f), tmpdir) + log.info(f"Found image name: {image_name}") + + # Ensure that 
we only upgrade if the log index is higher than the last known one + incoming_log_index = get_log_index_from_signatures(signatures) + last_log_index = get_last_log_index() + + if incoming_log_index < last_log_index: + raise errors.InvalidLogIndex( + "The log index is not higher than the last known one" + ) + + image_digest = index_json["manifests"][0].get("digest").replace("sha256:", "") + + # Write the new index.json to the temp folder + with open(Path(tmpdir) / "index.json", "w") as f: + json.dump(index_json, f) + + with NamedTemporaryFile(suffix=".tar") as temporary_tar: + with tarfile.open(temporary_tar.name, "w") as archive: + # The root is the tmpdir + archive.add(Path(tmpdir) / "index.json", arcname="index.json") + archive.add(Path(tmpdir) / "oci-layout", arcname="oci-layout") + archive.add(Path(tmpdir) / "blobs", arcname="blobs") + + runtime.load_image_tarball_from_tar(temporary_tar.name) + runtime.tag_image_by_digest(image_digest, image_name) + + store_signatures(signatures, image_digest, pubkey) + return image_name + + +def convert_oci_images_signatures( + signatures_manifest: List[Dict], tmpdir: str +) -> (str, List[Dict]): + def _to_cosign_signature(layer: Dict) -> Dict: + signature = layer["annotations"]["dev.cosignproject.cosign/signature"] + bundle = json.loads(layer["annotations"]["dev.sigstore.cosign/bundle"]) + payload_body = json.loads(b64decode(bundle["Payload"]["body"])) + + payload_location = _get_blob(tmpdir, layer["digest"]) + with open(payload_location, "rb") as f: + payload_b64 = b64encode(f.read()).decode() + + return { + "Base64Signature": payload_body["spec"]["signature"]["content"], + "Payload": payload_b64, + "Cert": None, + "Chain": None, + "Bundle": bundle, + "RFC3161Timestamp": None, + } + + layers = signatures_manifest["layers"] + signatures = [_to_cosign_signature(layer) for layer in layers] + + payload_location = _get_blob(tmpdir, layers[0]["digest"]) + with open(payload_location, "r") as f: + payload = json.load(f) + image_name 
= payload["critical"]["identity"]["docker-reference"] + + return image_name, signatures + + +def get_file_digest(file: Optional[str] = None, content: Optional[bytes] = None) -> str: + """Get the sha256 digest of a file or content""" + if not file and not content: + raise errors.UpdaterError("No file or content provided") + if file: + with open(file, "rb") as f: + content = f.read() + if content: + return sha256(content).hexdigest() + return "" + + +def load_signatures(image_digest: str, pubkey: str) -> List[Dict]: + """ + Load signatures from the local filesystem + + See store_signatures() for the expected format. + """ + pubkey_signatures = SIGNATURES_PATH / get_file_digest(pubkey) + if not pubkey_signatures.exists(): + msg = ( + f"Cannot find a '{pubkey_signatures}' folder." + "You might need to download the image signatures first." + ) + raise errors.SignaturesFolderDoesNotExist(msg) + + with open(pubkey_signatures / f"{image_digest}.json") as f: + log.debug("Loading signatures from %s", f.name) + return json.load(f) + + +def store_signatures(signatures: list[Dict], image_digest: str, pubkey: str) -> None: + """ + Store signatures locally in the SIGNATURE_PATH folder, like this: + + ~/.config/dangerzone/signatures/ + ├── + │ ├── .json + │ ├── .json + └── last_log_index + + The last_log_index file is used to keep track of the last log index + processed by the updater. + + The format used in the `.json` file is the one of `cosign download + signature`, which differs from the "bundle" one used afterwards. + + It can be converted to the one expected by cosign verify --bundle with + the `signature_to_bundle()` function. + """ + + def _get_digest(sig: Dict) -> str: + payload = json.loads(b64decode(sig["Payload"])) + return payload["critical"]["image"]["docker-manifest-digest"] + + # All the signatures should share the same digest. 
+ digests = list(map(_get_digest, signatures)) + if len(set(digests)) != 1: + raise errors.InvalidSignatures("Signatures do not share the same image digest") + + if f"sha256:{image_digest}" != digests[0]: + raise errors.SignatureMismatch( + f"Signatures do not match the given image digest ({image_digest}, {digests[0]})" + ) + + pubkey_signatures = SIGNATURES_PATH / get_file_digest(pubkey) + pubkey_signatures.mkdir(parents=True, exist_ok=True) + + with open(pubkey_signatures / f"{image_digest}.json", "w") as f: + log.info( + f"Storing signatures for {image_digest} in {pubkey_signatures}/{image_digest}.json" + ) + json.dump(signatures, f) + + +def verify_local_image(image: str, pubkey: str) -> bool: + """ + Verifies that a local image has a valid signature + """ + log.info(f"Verifying local image {image} against pubkey {pubkey}") + try: + image_digest = runtime.get_local_image_digest(image) + except subprocess.CalledProcessError: + raise errors.ImageNotFound(f"The image {image} does not exist locally") + + log.debug(f"Image digest: {image_digest}") + signatures = load_signatures(image_digest, pubkey) + if len(signatures) < 1: + raise errors.LocalSignatureNotFound("No signatures found") + + for signature in signatures: + if not verify_signature(signature, image_digest, pubkey): + msg = f"Unable to verify signature for {image} with pubkey {pubkey}" + raise errors.SignatureVerificationError(msg) + return True + + +def get_remote_signatures(image: str, digest: str) -> List[Dict]: + """Retrieve the signatures from the registry, via `cosign download`.""" + cosign.ensure_installed() + + # XXX: try/catch here + process = subprocess.run( + ["cosign", "download", "signature", f"{image}@sha256:{digest}"], + capture_output=True, + check=True, + ) + + # XXX: Check the output first. 
+ # Remove the last return, split on newlines, convert from JSON + signatures_raw = process.stdout.decode("utf-8").strip().split("\n") + signatures = list(map(json.loads, signatures_raw)) + if len(signatures) < 1: + raise errors.NoRemoteSignatures("No signatures found for the image") + return signatures + + +def prepare_airgapped_archive(image_name, destination): + if "@sha256:" not in image_name: + raise errors.AirgappedImageDownloadError( + "The image name must include a digest, e.g. ghcr.io/freedomofpress/dangerzone/dangerzone@sha256:123456" + ) + + cosign.ensure_installed() + # Get the image from the registry + + with TemporaryDirectory() as tmpdir: + msg = f"Downloading image {image_name}. \nIt might take a while." + log.info(msg) + + process = subprocess.run( + ["cosign", "save", image_name, "--dir", tmpdir], + capture_output=True, + check=True, + ) + if process.returncode != 0: + raise errors.AirgappedImageDownloadError() + + with tarfile.open(destination, "w") as archive: + archive.add(tmpdir, arcname=".") + + +def upgrade_container_image(image: str, manifest_digest: str, pubkey: str) -> bool: + """Verify and upgrade the image to the latest, if signed.""" + update_available, _ = is_update_available(image) + if not update_available: + raise errors.ImageAlreadyUpToDate("The image is already up to date") + + signatures = get_remote_signatures(image, manifest_digest) + verify_signatures(signatures, manifest_digest, pubkey) + + # Ensure that we only upgrade if the log index is higher than the last known one + incoming_log_index = get_log_index_from_signatures(signatures) + last_log_index = get_last_log_index() + + if incoming_log_index < last_log_index: + raise errors.InvalidLogIndex( + "The log index is not higher than the last known one" + ) + + # let's upgrade the image + # XXX Use the image digest here to avoid race conditions + upgraded = runtime.container_pull(image) + + # At this point, the signatures are verified + # We store the signatures just now to 
avoid storing unverified signatures + store_signatures(signatures, manifest_digest, pubkey) + return upgraded From 197325b2660f7e067d1e093fe2160afff89b870e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Tue, 11 Feb 2025 19:13:18 +0100 Subject: [PATCH 03/31] Add the ability to download diffoci for multiple platforms The hash list provided on the Github releases page is now bundled in the `reproduce-image.py` script, and the proper hashes are checked after download. --- dangerzone/errors.py | 4 ++++ dangerzone/updater/__init__.py | 7 ++++++ dangerzone/util.py | 2 +- dev_scripts/reproduce-image.py | 43 ++++++++++++++++++++++++++++++---- 4 files changed, 50 insertions(+), 6 deletions(-) diff --git a/dangerzone/errors.py b/dangerzone/errors.py index d8e1759..5abe187 100644 --- a/dangerzone/errors.py +++ b/dangerzone/errors.py @@ -126,6 +126,10 @@ class ImageNotPresentException(Exception): pass +class MultipleImagesFoundException(Exception): + pass + + class ImageInstallationException(Exception): pass diff --git a/dangerzone/updater/__init__.py b/dangerzone/updater/__init__.py index 3988bf1..57bfa1c 100644 --- a/dangerzone/updater/__init__.py +++ b/dangerzone/updater/__init__.py @@ -1,3 +1,10 @@ import logging log = logging.getLogger(__name__) + +from .signatures import ( + DEFAULT_PUBKEY_LOCATION, + is_update_available, + upgrade_container_image, + verify_local_image, +) diff --git a/dangerzone/util.py b/dangerzone/util.py index bcad701..90f77cc 100644 --- a/dangerzone/util.py +++ b/dangerzone/util.py @@ -8,7 +8,7 @@ import unicodedata try: import platformdirs except ImportError: - import appdirs as platformdirs + import appdirs as platformdirs # type: ignore[no-redef] def get_config_dir() -> str: diff --git a/dev_scripts/reproduce-image.py b/dev_scripts/reproduce-image.py index 0f757ae..5c7aa77 100755 --- a/dev_scripts/reproduce-image.py +++ b/dev_scripts/reproduce-image.py @@ -4,6 +4,7 @@ import argparse import hashlib import logging import 
pathlib +import platform import stat import subprocess import sys @@ -11,8 +12,20 @@ import urllib.request logger = logging.getLogger(__name__) -DIFFOCI_URL = "https://github.com/reproducible-containers/diffoci/releases/download/v0.1.5/diffoci-v0.1.5.linux-amd64" -DIFFOCI_CHECKSUM = "01d25fe690196945a6bd510d30559338aa489c034d3a1b895a0d82a4b860698f" +DIFFOCI_VERSION = "v0.1.5" +# https://github.com/reproducible-containers/diffoci/releases/download/v0.1.5/SHA256SUMS +DIFFOCI_CHECKSUMS = """ +ae171821b18c3b9e5cd1953323e79fe5ec1e972e9586474b18227b2cd052e695 diffoci-v0.1.5.darwin-amd64 +fadabdac9be45fb3dfe2a53986422e53dcc6e1fdc8062713c5760e8959a37c2b diffoci-v0.1.5.darwin-arm64 +01d25fe690196945a6bd510d30559338aa489c034d3a1b895a0d82a4b860698f diffoci-v0.1.5.linux-amd64 +5cbc5d13b51183e2988ee0f406d428eb846d51b7c2c12ae17d0775371f43103e diffoci-v0.1.5.linux-arm-v7 +2d067bd1af8a26b2c206c6bf2bde9bcb21062ddb5dc575e110e0e1a93d0d065f diffoci-v0.1.5.linux-arm64 +0923f0c01f270c596fea9f84e529af958d6caba3fa0f6bf4f03df2a12f23b3fc diffoci-v0.1.5.linux-ppc64le +5821cbc299a90caa167c3a91465292907077ca1123375f88165a842b8970e710 diffoci-v0.1.5.linux-riscv64 +917d7f23d2bd8fcc755cb2f722fc50ffd83389e04838c3b6e9c3463ea96a9be1 diffoci-v0.1.5.linux-s390x +""" +DIFFOCI_URL = "https://github.com/reproducible-containers/diffoci/releases/download/{version}/diffoci-{version}.{arch}" + DIFFOCI_PATH = ( pathlib.Path.home() / ".local" / "share" / "dangerzone-dev" / "helpers" / "diffoci" ) @@ -44,12 +57,31 @@ def git_verify(commit, source): ) +def get_platform_arch(): + system = platform.system().lower() + arch = platform.machine().lower() + if arch == "x86_64": + arch = "amd64" + return f"{system}-{arch}" + + +def parse_checksums(): + lines = [ + line.replace(f"diffoci-{DIFFOCI_VERSION}.", "").split(" ") + for line in DIFFOCI_CHECKSUMS.split("\n") + if line + ] + return {arch: checksum for checksum, arch in lines} + + def diffoci_hash_matches(diffoci): """Check if the hash of the downloaded diffoci bin 
matches the expected one.""" + arch = get_platform_arch() + expected_checksum = parse_checksums().get(arch) m = hashlib.sha256() m.update(diffoci) diffoci_checksum = m.hexdigest() - return diffoci_checksum == DIFFOCI_CHECKSUM + return diffoci_checksum == expected_checksum def diffoci_is_installed(): @@ -66,7 +98,9 @@ def diffoci_is_installed(): def diffoci_download(): """Download the diffoci tool, based on a URL and its checksum.""" - with urllib.request.urlopen(DIFFOCI_URL) as f: + download_url = DIFFOCI_URL.format(version=DIFFOCI_VERSION, arch=get_platform_arch()) + logger.info(f"Downloading diffoci helper from {download_url}") + with urllib.request.urlopen(download_url) as f: diffoci_bin = f.read() if not diffoci_hash_matches(diffoci_bin): @@ -153,7 +187,6 @@ def main(): git_verify(commit, args.source) if not diffoci_is_installed(): - logger.info(f"Downloading diffoci helper from {DIFFOCI_URL}") diffoci_download() tag = f"reproduce-{commit}" From f60c43f12b29bb7099c0c30324fa3f69ce86bb09 Mon Sep 17 00:00:00 2001 From: Alex Pyrgiotis Date: Tue, 11 Feb 2025 19:15:49 +0100 Subject: [PATCH 04/31] Publish and attest multi-architecture container images A new `dangerzone-image attest-provenance` script is now available, making it possible to verify the attestations of an image published on the github container registry. Container images are now build nightly and uploaded to the container registry. 
--- .github/workflows/release-container-image.yml | 168 ++++++++++++++++++ dangerzone/updater/attestations.py | 92 ++++++++++ dangerzone/updater/cli.py | 49 +++++ 3 files changed, 309 insertions(+) create mode 100644 .github/workflows/release-container-image.yml create mode 100644 dangerzone/updater/attestations.py diff --git a/.github/workflows/release-container-image.yml b/.github/workflows/release-container-image.yml new file mode 100644 index 0000000..cfc4081 --- /dev/null +++ b/.github/workflows/release-container-image.yml @@ -0,0 +1,168 @@ +name: Release multi-arch container image + +on: + workflow_dispatch: + push: + branches: + - main + - "test/**" + schedule: + - cron: "0 0 * * *" # Run every day at 00:00 UTC. + +env: + REGISTRY: ghcr.io/${{ github.repository_owner }} + REGISTRY_USER: ${{ github.actor }} + REGISTRY_PASSWORD: ${{ github.token }} + IMAGE_NAME: dangerzone/dangerzone + +jobs: + build: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + platform: + - linux/amd64 + - linux/arm64 + steps: + - uses: actions/checkout@v4 + + - name: Get current date + id: date + run: echo "date=$(date +'%Y%m%d')" >> $GITHUB_OUTPUT + + - name: Prepare + run: | + platform=${{ matrix.platform }} + echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV + + - name: Docker meta + id: meta + uses: docker/metadata-action@v5 + with: + images: | + ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + + - name: Login to GHCR + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Build and push by digest + id: build + uses: docker/build-push-action@v6 + with: + context: ./dangerzone/ + file: Dockerfile + build-args: | + DEBIAN_ARCHIVE_DATE=${{ steps.date.outputs.date }} + ## Remove potentially incorrect Docker provenance. 
+ #provenance: false + platforms: ${{ matrix.platform }} + labels: ${{ steps.meta.outputs.labels }} + outputs: type=image,"name=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}",push-by-digest=true,name-canonical=true,push=true + + - name: Export digest + run: | + mkdir -p ${{ runner.temp }}/digests + digest="${{ steps.build.outputs.digest }}" + touch "${{ runner.temp }}/digests/${digest#sha256:}" + + - name: Upload digest + uses: actions/upload-artifact@v4 + with: + name: digests-${{ env.PLATFORM_PAIR }} + path: ${{ runner.temp }}/digests/* + if-no-files-found: error + retention-days: 1 + + merge: + runs-on: ubuntu-latest + needs: + - build + outputs: + digest: ${{ steps.image.outputs.digest }} + image: ${{ steps.image.outputs.image }} + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Compute image tag + id: tag + run: | + DATE=$(date +'%Y%m%d') + TAG=$(git describe --long --first-parent | tail -c +2) + echo "tag=${DATE}-${TAG}" >> $GITHUB_OUTPUT + + - name: Download digests + uses: actions/download-artifact@v4 + with: + path: ${{ runner.temp }}/digests + pattern: digests-* + merge-multiple: true + + - name: Login to GHCR + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.repository_owner }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + #- name: Docker meta + # id: meta + # uses: docker/metadata-action@v5 + # with: + # images: | + # ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + # tags: | + # type=ref,event=branch + # type=ref,event=pr + # type=semver,pattern={{version}} + # type=semver,pattern={{major}}.{{minor}} + + - name: Create manifest list and push + working-directory: ${{ runner.temp }}/digests + run: | + IMAGE=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.tag.outputs.tag }} + DIGESTS=$(printf '${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}@sha256:%s ' *) + docker buildx imagetools create -t ${IMAGE} ${DIGESTS} + + - name: Inspect image + 
id: image + run: | + # NOTE: Set the image as an output because the `env` context is not + # available to the inputs of a reusable workflow call. + image_name="${REGISTRY}/${IMAGE_NAME}" + echo "image=$image_name" >> "$GITHUB_OUTPUT" + docker buildx imagetools inspect ${image_name}:${{ steps.tag.outputs.tag }} + digest=$(docker buildx imagetools inspect ${image_name}:${{ steps.tag.outputs.tag }} --format "{{json .Manifest}}" | jq -r '.digest') + echo "digest=$digest" >> "$GITHUB_OUTPUT" + + # This step calls the container workflow to generate provenance and push it to + # the container registry. + provenance: + needs: + - merge + permissions: + actions: read # for detecting the Github Actions environment. + id-token: write # for creating OIDC tokens for signing. + packages: write # for uploading attestations. + uses: slsa-framework/slsa-github-generator/.github/workflows/generator_container_slsa3.yml@v2.0.0 + with: + digest: ${{ needs.merge.outputs.digest }} + image: ${{ needs.merge.outputs.image }} + registry-username: ${{ github.actor }} + secrets: + registry-password: ${{ secrets.GITHUB_TOKEN }} diff --git a/dangerzone/updater/attestations.py b/dangerzone/updater/attestations.py new file mode 100644 index 0000000..90bf152 --- /dev/null +++ b/dangerzone/updater/attestations.py @@ -0,0 +1,92 @@ +import subprocess +from tempfile import NamedTemporaryFile + +from . 
import cosign + +# NOTE: You can grab the SLSA attestation for an image/tag pair with the following +# commands: +# +# IMAGE=ghcr.io/apyrgio/dangerzone/dangerzone +# TAG=20250129-0.8.0-149-gbf2f5ac +# DIGEST=$(crane digest ${IMAGE?}:${TAG?}) +# ATT_MANIFEST=${IMAGE?}:${DIGEST/:/-}.att +# ATT_BLOB=${IMAGE?}@$(crane manifest ${ATT_MANIFEST?} | jq -r '.layers[0].digest') +# crane blob ${ATT_BLOB?} | jq -r '.payload' | base64 -d | jq +CUE_POLICY = r""" +// The predicateType field must match this string +predicateType: "https://slsa.dev/provenance/v0.2" + +predicate: {{ + // This condition verifies that the builder is the builder we + // expect and trust. The following condition can be used + // unmodified. It verifies that the builder is the container + // workflow. + builder: {{ + id: =~"^https://github.com/slsa-framework/slsa-github-generator/.github/workflows/generator_container_slsa3.yml@refs/tags/v[0-9]+.[0-9]+.[0-9]+$" + }} + invocation: {{ + configSource: {{ + // This condition verifies the entrypoint of the workflow. + // Replace with the relative path to your workflow in your + // repository. + entryPoint: "{workflow}" + + // This condition verifies that the image was generated from + // the source repository we expect. Replace this with your + // repository. + uri: =~"^git\\+https://github.com/{repo}@refs/heads/{branch}" + // Add a condition to check for a specific commit hash + digest: {{ + sha1: "{commit}" + }} + }} + }} +}} +""" + + +def generate_cue_policy(repo, workflow, commit, branch): + return CUE_POLICY.format(repo=repo, workflow=workflow, commit=commit, branch=branch) + + +def verify( + image_name: str, + branch: str, + commit: str, + repository: str, + workflow: str, +) -> bool: + """ + Look up the image attestation to see if the image has been built + on Github runners, and from a given repository. 
+ """ + cosign.ensure_installed() + policy = generate_cue_policy(repository, workflow, commit, branch) + + # Put the value in files and verify with cosign + with ( + NamedTemporaryFile(mode="w", suffix=".cue") as policy_f, + ): + policy_f.write(policy) + policy_f.flush() + + # Call cosign with the temporary file paths + cmd = [ + "cosign", + "verify-attestation", + "--type", + "slsaprovenance", + "--policy", + policy_f.name, + "--certificate-oidc-issuer", + "https://token.actions.githubusercontent.com", + "--certificate-identity-regexp", + "^https://github.com/slsa-framework/slsa-github-generator/.github/workflows/generator_container_slsa3.yml@refs/tags/v[0-9]+.[0-9]+.[0-9]+$", + image_name, + ] + + result = subprocess.run(cmd, capture_output=True) + if result.returncode != 0: + error = result.stderr.decode() + raise Exception(f"Attestation cannot be verified. {error}") + return True diff --git a/dangerzone/updater/cli.py b/dangerzone/updater/cli.py index 9eab01e..42fe58c 100644 --- a/dangerzone/updater/cli.py +++ b/dangerzone/updater/cli.py @@ -101,5 +101,54 @@ def get_manifest(image: str) -> None: click.echo(registry.get_manifest(image).content) +@main.command() +@click.argument("image_name") +# XXX: Do we really want to check against this? 
+@click.option( + "--branch", + default=DEFAULT_BRANCH, + help="The Git branch that the image was built from", +) +@click.option( + "--commit", + required=True, + help="The Git commit the image was built from", +) +@click.option( + "--repository", + default=DEFAULT_REPOSITORY, + help="The github repository to check the attestation for", +) +@click.option( + "--workflow", + default=".github/workflows/release-container-image.yml", + help="The path of the GitHub actions workflow this image was created from", +) +def attest_provenance( + image_name: str, + branch: str, + commit: str, + repository: str, + workflow: str, +) -> None: + """ + Look up the image attestation to see if the image has been built + on Github runners, and from a given repository. + """ + # TODO: Parse image and make sure it has a tag. Might even check for a digest. + # parsed = registry.parse_image_location(image) + + verified = attestations.verify(image_name, branch, commit, repository, workflow) + if verified: + click.echo( + f"🎉 Successfully verified image '{image_name}' and its associated claims:" + ) + click.echo(f"- ✅ SLSA Level 3 provenance") + click.echo(f"- ✅ GitHub repo: {repository}") + click.echo(f"- ✅ GitHub actions workflow: {workflow}") + click.echo(f"- ✅ Git branch: {branch}") + click.echo(f"- ✅ Git commit: {commit}") + + if __name__ == "__main__": main() From af55d26c2e0d3b4ec889e529bdad77c12185423e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Tue, 11 Feb 2025 19:19:54 +0100 Subject: [PATCH 05/31] Add documentation for independent container updates --- .../independent-container-updates.md | 83 +++++++++++++++++++ 1 file changed, 83 insertions(+) create mode 100644 docs/developer/independent-container-updates.md diff --git a/docs/developer/independent-container-updates.md b/docs/developer/independent-container-updates.md new file mode 100644 index 0000000..9f008a0 --- /dev/null +++ b/docs/developer/independent-container-updates.md @@ -0,0 +1,83 @@ +# 
Independent Container Updates + +Since version 0.9.0, Dangerzone is able to ship container images independently +from releases of the software. + +One of the main benefits of doing so is to shorten the time needed to distribute the security fixes for the containers. Being the place where the actual conversion of documents happens, it's a way to keep dangerzone users secure. + +If you are a dangerzone user, this all happens behind the curtain, and you should not have to know anything about that to enjoy these "in-app" updates. If you are using dangerzone in an air-gapped environment, check the sections below. + +## Checking attestations + +Each night, new images are built and pushed to the container registry, along +with a provenance attestation, enabling anybody to ensure that the image has +been originally built by GitHub CI runners, from a defined source repository (in our case `freedomofpress/dangerzone`). + +To verify the attestations against our expectations, use the following command: +```bash +dangerzone-image attest-provenance ghcr.io/freedomofpress/dangerzone/dangerzone --repository freedomofpress/dangerzone +``` + +In case of success, it will report back: + +``` +🎉 Successfully verified image +'ghcr.io/freedomofpress/dangerzone/dangerzone:@sha256:' +and its associated claims: +- ✅ SLSA Level 3 provenance +- ✅ GitHub repo: freedomofpress/dangerzone +- ✅ GitHub actions workflow: +- ✅ Git branch: +- ✅ Git commit: +``` + +## Sign and publish the remote image + +Once the image has been reproduced locally, we can add a signature to the container registry, +and update the `latest` tag to point to the proper hash. 
+ +```bash +cosign sign --sk ghcr.io/freedomofpress/dangerzone/dangerzone:${TAG}@sha256:${DIGEST} + +# And mark bump latest +crane auth login ghcr.io -u USERNAME --password $(cat pat_token) +crane tag ghcr.io/freedomofpress/dangerzone/dangerzone@sha256:${DIGEST} latest +``` + +## Install updates + +To check if a new container image has been released, and update your local installation with it, you can use the following commands: + +```bash +dangerzone-image upgrade ghcr.io/freedomofpress/dangerzone/dangerzone +``` + +## Verify locally + +You can verify that the image you have locally matches the stored signatures, and that these have been signed with a trusted public key: + +```bash +dangerzone-image verify-local ghcr.io/freedomofpress/dangerzone/dangerzone +``` + +## Installing image updates to air-gapped environments + +Three steps are required: + +1. Prepare the archive +2. Transfer the archive to the air-gapped system +3. Install the archive on the air-gapped system + +This archive will contain all the needed material to validate that the new container image has been signed and is valid. 
+ +On the machine on which you prepare the packages: + +```bash +dangerzone-image prepare-archive --output dz-fa94872.tar ghcr.io/freedomofpress/dangerzone/dangerzone@sha256: +``` + +On the airgapped machine, copy the file and run the following command: + +```bash +dangerzone-image load-archive dz-fa94872.tar +``` From 5c9a38d3706ee985be95cb287cd2a6ba6ed40ce5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Tue, 11 Feb 2025 19:23:05 +0100 Subject: [PATCH 06/31] (WIP) Check for container updates rather than using `image-id.txt` --- dangerzone/isolation_provider/container.py | 59 +++++++++------------- 1 file changed, 23 insertions(+), 36 deletions(-) diff --git a/dangerzone/isolation_provider/container.py b/dangerzone/isolation_provider/container.py index 0213cde..bc810d4 100644 --- a/dangerzone/isolation_provider/container.py +++ b/dangerzone/isolation_provider/container.py @@ -5,7 +5,7 @@ import shlex import subprocess from typing import List, Tuple -from .. import container_utils, errors +from .. import container_utils, errors, updater from ..document import Document from ..util import get_resource_path, get_subprocess_startupinfo from .base import IsolationProvider, terminate_process_group @@ -78,41 +78,23 @@ class Container(IsolationProvider): @staticmethod def install() -> bool: - """Install the container image tarball, or verify that it's already installed. + """Check if an update is available and install it if necessary.""" + # XXX Do this only if users have optted in to auto-updates - Perform the following actions: - 1. Get the tags of any locally available images that match Dangerzone's image - name. - 2. Get the expected image tag from the image-id.txt file. - - If this tag is present in the local images, then we can return. - - Else, prune the older container images and continue. - 3. Load the image tarball and make sure it matches the expected tag. 
- """ - old_tags = container_utils.list_image_tags() - expected_tag = container_utils.get_expected_tag() - - if expected_tag not in old_tags: - # Prune older container images. - log.info( - f"Could not find a Dangerzone container image with tag '{expected_tag}'" + # # Load the image tarball into the container runtime. + update_available, image_digest = updater.is_update_available( + container_utils.CONTAINER_NAME + ) + if update_available: + updater.upgrade_container_image( + container_utils.CONTAINER_NAME, + image_digest, + updater.DEFAULT_PUBKEY_LOCATION, ) - for tag in old_tags: - container_utils.delete_image_tag(tag) - else: - return True - # Load the image tarball into the container runtime. - container_utils.load_image_tarball() - - # Check that the container image has the expected image tag. - # See https://github.com/freedomofpress/dangerzone/issues/988 for an example - # where this was not the case. - new_tags = container_utils.list_image_tags() - if expected_tag not in new_tags: - raise errors.ImageNotPresentException( - f"Could not find expected tag '{expected_tag}' after loading the" - " container image tarball" - ) + updater.verify_local_image( + container_utils.CONTAINER_NAME, updater.DEFAULT_PUBKEY_LOCATION + ) return True @@ -193,6 +175,13 @@ class Container(IsolationProvider): name: str, ) -> subprocess.Popen: container_runtime = container_utils.get_runtime() + + image_digest = container_utils.get_local_image_digest( + container_utils.CONTAINER_NAME + ) + updater.verify_local_image( + container_utils.CONTAINER_NAME, updater.DEFAULT_PUBKEY_LOCATION + ) security_args = self.get_runtime_security_args() debug_args = [] if self.debug: @@ -201,9 +190,7 @@ class Container(IsolationProvider): enable_stdin = ["-i"] set_name = ["--name", name] prevent_leakage_args = ["--rm"] - image_name = [ - container_utils.CONTAINER_NAME + ":" + container_utils.get_expected_tag() - ] + image_name = [container_utils.CONTAINER_NAME + "@sha256:" + image_digest] args = ( 
["run"] + security_args From 27647cc309349d29da41d3a3ef5f497a0ffe689b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 12 Feb 2025 11:40:36 +0100 Subject: [PATCH 07/31] fixup! Download and verify cosign signatures --- dangerzone/container_utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dangerzone/container_utils.py b/dangerzone/container_utils.py index 863a871..1d20a1c 100644 --- a/dangerzone/container_utils.py +++ b/dangerzone/container_utils.py @@ -187,7 +187,7 @@ def container_pull(image: str) -> bool: return process.returncode == 0 -def get_local_image_digest(image: str) -> Optional[str]: +def get_local_image_digest(image: str) -> str: """ Returns a image hash from a local image name """ From f6562ae59c3d71ecfd79dfef9cc4f25a5e0e61a4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 12 Feb 2025 11:40:36 +0100 Subject: [PATCH 08/31] fixup! Download and verify cosign signatures --- dangerzone/container_utils.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/dangerzone/container_utils.py b/dangerzone/container_utils.py index 1d20a1c..98e745c 100644 --- a/dangerzone/container_utils.py +++ b/dangerzone/container_utils.py @@ -204,6 +204,6 @@ def get_local_image_digest(image: str) -> str: ) return lines[0].replace("sha256:", "") except subprocess.CalledProcessError as e: - return None - else: - return result.stdout.strip().decode().strip("sha256:") + raise errors.ImageNotPresentException( + f"The image {image} does not exist locally" + ) From 7002ab85a092998c0ed661d33fe9bc6a6ab3ec93 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 12 Feb 2025 11:40:36 +0100 Subject: [PATCH 09/31] fixup! 
Download and verify cosign signatures --- dangerzone/updater/errors.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/dangerzone/updater/errors.py b/dangerzone/updater/errors.py index d302975..6b75c0e 100644 --- a/dangerzone/updater/errors.py +++ b/dangerzone/updater/errors.py @@ -52,3 +52,7 @@ class LocalSignatureNotFound(SignatureError): class CosignNotInstalledError(SignatureError): pass + + +class InvalidLogIndex(SignatureError): + pass From 6aff84549364a548fbf738d5e62a00e8b35c9105 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 12 Feb 2025 11:40:36 +0100 Subject: [PATCH 10/31] fixup! Download and verify cosign signatures --- dangerzone/updater/signatures.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dangerzone/updater/signatures.py b/dangerzone/updater/signatures.py index e5f1189..4503350 100644 --- a/dangerzone/updater/signatures.py +++ b/dangerzone/updater/signatures.py @@ -108,7 +108,7 @@ def verify_signature(signature: dict, image_digest: str, pubkey: str) -> bool: return False -def is_update_available(image: str) -> (bool, Optional[str]): +def is_update_available(image: str) -> Tuple[bool, Optional[str]]: remote_digest = registry.get_manifest_digest(image) local_digest = runtime.get_local_image_digest(image) log.debug("Remote digest: %s", remote_digest) From db33038c23427da1931b17c4d475417d1fd248e6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 12 Feb 2025 11:40:36 +0100 Subject: [PATCH 11/31] fixup! 
Download and verify cosign signatures --- dangerzone/updater/signatures.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dangerzone/updater/signatures.py b/dangerzone/updater/signatures.py index 4503350..24ebff1 100644 --- a/dangerzone/updater/signatures.py +++ b/dangerzone/updater/signatures.py @@ -198,7 +198,7 @@ def upgrade_container_image_airgapped(container_tar: str, pubkey: str) -> str: in ("dev.cosignproject.cosign/imageIndex", "dev.cosignproject.cosign/image") ] - with open(signature_filename, "rb") as f: + with open(signature_filename, "r") as f: image_name, signatures = convert_oci_images_signatures(json.load(f), tmpdir) log.info(f"Found image name: {image_name}") From 5001328ae9b107cf7b31cc7d4e87793916d973c5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 12 Feb 2025 11:40:36 +0100 Subject: [PATCH 12/31] fixup! Download and verify cosign signatures --- dangerzone/updater/signatures.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dangerzone/updater/signatures.py b/dangerzone/updater/signatures.py index 24ebff1..6b461d4 100644 --- a/dangerzone/updater/signatures.py +++ b/dangerzone/updater/signatures.py @@ -232,8 +232,8 @@ def upgrade_container_image_airgapped(container_tar: str, pubkey: str) -> str: def convert_oci_images_signatures( - signatures_manifest: List[Dict], tmpdir: str -) -> (str, List[Dict]): + signatures_manifest: Dict, tmpdir: str +) -> Tuple[str, List[Dict]]: def _to_cosign_signature(layer: Dict) -> Dict: signature = layer["annotations"]["dev.cosignproject.cosign/signature"] bundle = json.loads(layer["annotations"]["dev.sigstore.cosign/bundle"]) From 22d235cabd1ac247e413114acc5f0d810103ca57 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 12 Feb 2025 11:40:36 +0100 Subject: [PATCH 13/31] fixup! 
Download and verify cosign signatures --- dangerzone/updater/signatures.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dangerzone/updater/signatures.py b/dangerzone/updater/signatures.py index 6b461d4..2385e9a 100644 --- a/dangerzone/updater/signatures.py +++ b/dangerzone/updater/signatures.py @@ -252,7 +252,7 @@ def convert_oci_images_signatures( "RFC3161Timestamp": None, } - layers = signatures_manifest["layers"] + layers = signatures_manifest.get("layers", []) signatures = [_to_cosign_signature(layer) for layer in layers] payload_location = _get_blob(tmpdir, layers[0]["digest"]) From 5a4ddb17c94ac7117844fe3a7dc320fce2ce2719 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 12 Feb 2025 11:40:36 +0100 Subject: [PATCH 14/31] fixup! Download and verify cosign signatures --- dangerzone/updater/signatures.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/dangerzone/updater/signatures.py b/dangerzone/updater/signatures.py index 2385e9a..57b3d1d 100644 --- a/dangerzone/updater/signatures.py +++ b/dangerzone/updater/signatures.py @@ -255,6 +255,9 @@ def convert_oci_images_signatures( layers = signatures_manifest.get("layers", []) signatures = [_to_cosign_signature(layer) for layer in layers] + if not signatures: + raise errors.SignatureExtractionError() + payload_location = _get_blob(tmpdir, layers[0]["digest"]) with open(payload_location, "r") as f: payload = json.load(f) From 1e9e468e3766eb8fd518a94589f9acdd6b3081ac Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 12 Feb 2025 11:40:36 +0100 Subject: [PATCH 15/31] fixup! 
Download and verify cosign signatures --- dangerzone/updater/signatures.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dangerzone/updater/signatures.py b/dangerzone/updater/signatures.py index 57b3d1d..7591631 100644 --- a/dangerzone/updater/signatures.py +++ b/dangerzone/updater/signatures.py @@ -383,7 +383,7 @@ def get_remote_signatures(image: str, digest: str) -> List[Dict]: return signatures -def prepare_airgapped_archive(image_name, destination): +def prepare_airgapped_archive(image_name: str, destination: str): if "@sha256:" not in image_name: raise errors.AirgappedImageDownloadError( "The image name must include a digest, e.g. ghcr.io/freedomofpress/dangerzone/dangerzone@sha256:123456" From 379c9f8f004340d07013710ca1ff2a6531368f41 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 12 Feb 2025 11:40:36 +0100 Subject: [PATCH 16/31] fixup! Add a `dangerzone-image` CLI script --- dangerzone/updater/registry.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dangerzone/updater/registry.py b/dangerzone/updater/registry.py index a5dd1db..9459c64 100644 --- a/dangerzone/updater/registry.py +++ b/dangerzone/updater/registry.py @@ -52,7 +52,7 @@ def parse_image_location(input_string: str) -> Image: ) -def _get_auth_header(image) -> Dict[str, str]: +def _get_auth_header(image: Image) -> Dict[str, str]: auth_url = f"https://{image.registry}/token" response = requests.get( auth_url, From d667c284c7729728ec0e51ca8e26510c165b4b39 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 12 Feb 2025 11:40:36 +0100 Subject: [PATCH 17/31] fixup! 
Add a `dangerzone-image` CLI script --- dangerzone/updater/registry.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dangerzone/updater/registry.py b/dangerzone/updater/registry.py index 9459c64..a7ee519 100644 --- a/dangerzone/updater/registry.py +++ b/dangerzone/updater/registry.py @@ -66,7 +66,7 @@ def _get_auth_header(image: Image) -> Dict[str, str]: return {"Authorization": f"Bearer {token}"} -def _url(image): +def _url(image: Image) -> str: return f"https://{image.registry}/v2/{image.namespace}/{image.image_name}" From 431f0cb80389d1d5cc55067b756e9353a30c92d6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 12 Feb 2025 11:40:36 +0100 Subject: [PATCH 18/31] fixup! Add a `dangerzone-image` CLI script --- dangerzone/updater/registry.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dangerzone/updater/registry.py b/dangerzone/updater/registry.py index a7ee519..b988e12 100644 --- a/dangerzone/updater/registry.py +++ b/dangerzone/updater/registry.py @@ -79,7 +79,7 @@ def list_tags(image_str: str) -> list: return tags -def get_manifest(image_str) -> requests.Response: +def get_manifest(image_str: str) -> requests.Response: """Get manifest information for a specific tag""" image = parse_image_location(image_str) manifest_url = f"{_url(image)}/manifests/{image.tag}" From aac6c6334a6b367bfa2600ec08fc8494b0aeae16 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 12 Feb 2025 11:40:36 +0100 Subject: [PATCH 19/31] fixup! 
Add a `dangerzone-image` CLI script --- dangerzone/updater/registry.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dangerzone/updater/registry.py b/dangerzone/updater/registry.py index b988e12..74674ef 100644 --- a/dangerzone/updater/registry.py +++ b/dangerzone/updater/registry.py @@ -93,7 +93,7 @@ def get_manifest(image_str: str) -> requests.Response: return response -def list_manifests(image_str) -> list: +def list_manifests(image_str: str) -> list: return get_manifest(image_str).json().get("manifests") From ccae6c5b163c929276186d7f1ef8c7ed4dd878e7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 12 Feb 2025 11:40:36 +0100 Subject: [PATCH 20/31] fixup! Add a `dangerzone-image` CLI script --- dangerzone/updater/registry.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dangerzone/updater/registry.py b/dangerzone/updater/registry.py index 74674ef..841e631 100644 --- a/dangerzone/updater/registry.py +++ b/dangerzone/updater/registry.py @@ -97,7 +97,7 @@ def list_manifests(image_str: str) -> list: return get_manifest(image_str).json().get("manifests") -def get_blob(image, digest: str) -> requests.Response: +def get_blob(image: Image, digest: str) -> requests.Response: response = requests.get( f"{_url(image)}/blobs/{digest}", headers={ From 5202d7927029ea0b65ada8e9f75c14b1cb283f77 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 12 Feb 2025 11:40:36 +0100 Subject: [PATCH 21/31] fixup! 
Add a `dangerzone-image` CLI script --- dangerzone/updater/registry.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/dangerzone/updater/registry.py b/dangerzone/updater/registry.py index 841e631..46a39b7 100644 --- a/dangerzone/updater/registry.py +++ b/dangerzone/updater/registry.py @@ -99,10 +99,7 @@ def list_manifests(image_str: str) -> list: def get_blob(image: Image, digest: str) -> requests.Response: response = requests.get( - f"{_url(image)}/blobs/{digest}", - headers={ - "Authorization": f"Bearer {_get_auth_token(image)}", - }, + f"{_url(image)}/blobs/{digest}", headers=_get_auth_header(image) ) response.raise_for_status() return response From 988971096c292bc50f45298404438eb303987a74 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 12 Feb 2025 11:40:36 +0100 Subject: [PATCH 22/31] fixup! Add a `dangerzone-image` CLI script --- dangerzone/updater/registry.py | 1 - 1 file changed, 1 deletion(-) diff --git a/dangerzone/updater/registry.py b/dangerzone/updater/registry.py index 46a39b7..4088569 100644 --- a/dangerzone/updater/registry.py +++ b/dangerzone/updater/registry.py @@ -108,7 +108,6 @@ def get_blob(image: Image, digest: str) -> requests.Response: def get_manifest_digest( image_str: str, tag_manifest_content: Optional[bytes] = None ) -> str: - image = parse_image_location(image_str) if not tag_manifest_content: tag_manifest_content = get_manifest(image).content From 668ee71895b9baf03b8c850132d8439b396bb934 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 12 Feb 2025 11:40:36 +0100 Subject: [PATCH 23/31] fixup! 
Add a `dangerzone-image` CLI script --- dangerzone/updater/registry.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dangerzone/updater/registry.py b/dangerzone/updater/registry.py index 4088569..c690b9f 100644 --- a/dangerzone/updater/registry.py +++ b/dangerzone/updater/registry.py @@ -109,6 +109,6 @@ def get_manifest_digest( image_str: str, tag_manifest_content: Optional[bytes] = None ) -> str: if not tag_manifest_content: - tag_manifest_content = get_manifest(image).content + tag_manifest_content = get_manifest(image_str).content return sha256(tag_manifest_content).hexdigest() From 0724f86b13e756666024d178d94b75e587bf29fe Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 12 Feb 2025 11:40:36 +0100 Subject: [PATCH 24/31] fixup! Publish and attest multi-architecture container images --- dangerzone/updater/attestations.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dangerzone/updater/attestations.py b/dangerzone/updater/attestations.py index 90bf152..6650f46 100644 --- a/dangerzone/updater/attestations.py +++ b/dangerzone/updater/attestations.py @@ -34,7 +34,7 @@ predicate: {{ // This condition verifies that the image was generated from // the source repository we expect. Replace this with your // repository. - uri: =~"^git\\+https://github.com/{repo}@refs/heads/{branch}" + uri: =~"^git\\+https://github.com/{repository}@refs/heads/{branch}" // Add a condition to check for a specific commit hash digest: {{ sha1: "{commit}" From 537d23e23360586f3cce644e1ca3ebadab553eae Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 12 Feb 2025 11:40:36 +0100 Subject: [PATCH 25/31] fixup! 
Publish and attest multi-architecture container images --- dangerzone/updater/attestations.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/dangerzone/updater/attestations.py b/dangerzone/updater/attestations.py index 6650f46..b8f5db7 100644 --- a/dangerzone/updater/attestations.py +++ b/dangerzone/updater/attestations.py @@ -45,10 +45,6 @@ predicate: {{ """ -def generate_cue_policy(repo, workflow, commit, branch): - return CUE_POLICY.format(repo=repo, workflow=workflow, commit=commit, branch=branch) - - def verify( image_name: str, branch: str, From 5acb302acfceeb93c6738b4235a0323966f41a4f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 12 Feb 2025 11:40:36 +0100 Subject: [PATCH 26/31] fixup! Publish and attest multi-architecture container images --- dangerzone/updater/attestations.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/dangerzone/updater/attestations.py b/dangerzone/updater/attestations.py index b8f5db7..bdf1ef6 100644 --- a/dangerzone/updater/attestations.py +++ b/dangerzone/updater/attestations.py @@ -57,7 +57,9 @@ def verify( on Github runners, and from a given repository. """ cosign.ensure_installed() - policy = generate_cue_policy(repository, workflow, commit, branch) + policy = CUE_POLICY.format( + repository=repository, workflow=workflow, commit=commit, branch=branch + ) # Put the value in files and verify with cosign with ( From e078e9bb8293ce8837916d1857c28d9440a6c069 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 12 Feb 2025 11:53:36 +0100 Subject: [PATCH 27/31] fixup! 
1e9e468e3766eb8fd518a94589f9acdd6b3081ac --- dangerzone/updater/signatures.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dangerzone/updater/signatures.py b/dangerzone/updater/signatures.py index 7591631..4e9ab5e 100644 --- a/dangerzone/updater/signatures.py +++ b/dangerzone/updater/signatures.py @@ -383,7 +383,7 @@ def get_remote_signatures(image: str, digest: str) -> List[Dict]: return signatures -def prepare_airgapped_archive(image_name: str, destination: str): +def prepare_airgapped_archive(image_name: str, destination: str) -> None: if "@sha256:" not in image_name: raise errors.AirgappedImageDownloadError( "The image name must include a digest, e.g. ghcr.io/freedomofpress/dangerzone/dangerzone@sha256:123456" From 60674ea6b4e80379c82c9c19b5f1ed12175ef02e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 12 Feb 2025 11:53:36 +0100 Subject: [PATCH 28/31] fixup! (WIP) Check for container updates rather than using `image-id.txt` --- dangerzone/isolation_provider/container.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dangerzone/isolation_provider/container.py b/dangerzone/isolation_provider/container.py index bc810d4..82ebfe6 100644 --- a/dangerzone/isolation_provider/container.py +++ b/dangerzone/isolation_provider/container.py @@ -85,7 +85,7 @@ class Container(IsolationProvider): update_available, image_digest = updater.is_update_available( container_utils.CONTAINER_NAME ) - if update_available: + if update_available and image_digest: updater.upgrade_container_image( container_utils.CONTAINER_NAME, image_digest, From 835970b541ab2d82d3d147a54b7c56ecb2031c12 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 12 Feb 2025 12:05:20 +0100 Subject: [PATCH 29/31] fixup! 
(WIP) Check for container updates rather than using `image-id.txt` --- dangerzone/isolation_provider/container.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/dangerzone/isolation_provider/container.py b/dangerzone/isolation_provider/container.py index 82ebfe6..cbd23b3 100644 --- a/dangerzone/isolation_provider/container.py +++ b/dangerzone/isolation_provider/container.py @@ -180,7 +180,9 @@ class Container(IsolationProvider): container_utils.CONTAINER_NAME ) updater.verify_local_image( - container_utils.CONTAINER_NAME, updater.DEFAULT_PUBKEY_LOCATION + container_utils.CONTAINER_NAME, + updater.DEFAULT_PUBKEY_LOCATION, + image_digest, ) security_args = self.get_runtime_security_args() debug_args = [] From a540fc5b08ac4363f705597c3127641e30958947 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 12 Feb 2025 18:23:12 +0100 Subject: [PATCH 30/31] (WIP) Add tests --- dangerzone/container_utils.py | 7 +- dangerzone/isolation_provider/container.py | 22 +- dangerzone/updater/signatures.py | 89 ++++--- tests/assets/signatures/README.md | 7 + ...d955e68ee3e07b41b9d53f4c8cc9929a68a67.json | 18 ++ ...aa9338681e64dd3e34a34873866cb051d694e.json | 18 ++ ...5745d532d7a4079886e1647924bee7ef1c14d.json | 18 ++ ...2230dc6566997f852ef5d62b0338b46796e01.json | 18 ++ ...d955e68ee3e07b41b9d53f4c8cc9929a68a67.json | 18 ++ ...aa9338681e64dd3e34a34873866cb051d694e.json | 18 ++ .../README.md | 1 + ...d955e68ee3e07b41b9d53f4c8cc9929a68a67.json | 1 + ...aa9338681e64dd3e34a34873866cb051d694e.json | 1 + ...5745d532d7a4079886e1647924bee7ef1c14d.json | 1 + ...2230dc6566997f852ef5d62b0338b46796e01.json | 1 + ...bac18522b35b2491fdf716236a0b3502a2ca7.json | 1 + tests/assets/test.pub.key | 4 + tests/test_signatures.py | 221 ++++++++++++++++++ 18 files changed, 420 insertions(+), 44 deletions(-) create mode 100644 tests/assets/signatures/README.md create mode 100644 
tests/assets/signatures/invalid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/19e8eacd75879d05f6621c2ea8dd955e68ee3e07b41b9d53f4c8cc9929a68a67.json create mode 100644 tests/assets/signatures/invalid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/220b52200e3e47b1b42010667fcaa9338681e64dd3e34a34873866cb051d694e.json create mode 100644 tests/assets/signatures/invalid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/4da441235e84e93518778827a5c5745d532d7a4079886e1647924bee7ef1c14d.json create mode 100644 tests/assets/signatures/invalid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/7b21dbdebffed855621dfcdeaa52230dc6566997f852ef5d62b0338b46796e01.json create mode 100644 tests/assets/signatures/tempered/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/19e8eacd75879d05f6621c2ea8dd955e68ee3e07b41b9d53f4c8cc9929a68a67.json create mode 100644 tests/assets/signatures/tempered/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/220b52200e3e47b1b42010667fcaa9338681e64dd3e34a34873866cb051d694e.json create mode 100644 tests/assets/signatures/tempered/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/README.md create mode 100644 tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/19e8eacd75879d05f6621c2ea8dd955e68ee3e07b41b9d53f4c8cc9929a68a67.json create mode 100644 tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/220b52200e3e47b1b42010667fcaa9338681e64dd3e34a34873866cb051d694e.json create mode 100644 tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/4da441235e84e93518778827a5c5745d532d7a4079886e1647924bee7ef1c14d.json create mode 100644 tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/7b21dbdebffed855621dfcdeaa52230dc6566997f852ef5d62b0338b46796e01.json create mode 100644 
tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/fa948726aac29a6ac49f01ec8fbbac18522b35b2491fdf716236a0b3502a2ca7.json create mode 100644 tests/assets/test.pub.key create mode 100644 tests/test_signatures.py diff --git a/dangerzone/container_utils.py b/dangerzone/container_utils.py index 98e745c..ce31c80 100644 --- a/dangerzone/container_utils.py +++ b/dangerzone/container_utils.py @@ -202,7 +202,12 @@ def get_local_image_digest(image: str) -> str: raise errors.MultipleImagesFoundException( f"Expected a single line of output, got {len(lines)} lines" ) - return lines[0].replace("sha256:", "") + image_digest = lines[0].replace("sha256:", "") + if not image_digest: + raise errors.ImageNotPresentException( + f"The image {image} does not exist locally" + ) + return image_digest except subprocess.CalledProcessError as e: raise errors.ImageNotPresentException( f"The image {image} does not exist locally" diff --git a/dangerzone/isolation_provider/container.py b/dangerzone/isolation_provider/container.py index cbd23b3..f2c0d2c 100644 --- a/dangerzone/isolation_provider/container.py +++ b/dangerzone/isolation_provider/container.py @@ -79,18 +79,19 @@ class Container(IsolationProvider): @staticmethod def install() -> bool: """Check if an update is available and install it if necessary.""" - # XXX Do this only if users have optted in to auto-updates + # XXX Do this only if users have opted in to auto-updates - # # Load the image tarball into the container runtime. - update_available, image_digest = updater.is_update_available( - container_utils.CONTAINER_NAME - ) - if update_available and image_digest: - updater.upgrade_container_image( - container_utils.CONTAINER_NAME, - image_digest, - updater.DEFAULT_PUBKEY_LOCATION, + if False: # Comment this for now, just as an exemple of this can be implemented + # # Load the image tarball into the container runtime. 
+ update_available, image_digest = updater.is_update_available( + container_utils.CONTAINER_NAME ) + if update_available and image_digest: + updater.upgrade_container_image( + container_utils.CONTAINER_NAME, + image_digest, + updater.DEFAULT_PUBKEY_LOCATION, + ) updater.verify_local_image( container_utils.CONTAINER_NAME, updater.DEFAULT_PUBKEY_LOCATION @@ -182,7 +183,6 @@ class Container(IsolationProvider): updater.verify_local_image( container_utils.CONTAINER_NAME, updater.DEFAULT_PUBKEY_LOCATION, - image_digest, ) security_args = self.get_runtime_security_args() debug_args = [] diff --git a/dangerzone/updater/signatures.py b/dangerzone/updater/signatures.py index 4e9ab5e..a407c2b 100644 --- a/dangerzone/updater/signatures.py +++ b/dangerzone/updater/signatures.py @@ -32,7 +32,7 @@ LAST_LOG_INDEX = SIGNATURES_PATH / "last_log_index" __all__ = [ "verify_signature", - "load_signatures", + "load_and_verify_signatures", "store_signatures", "verify_offline_image_signature", ] @@ -60,18 +60,22 @@ def signature_to_bundle(sig: Dict) -> Dict: } -def verify_signature(signature: dict, image_digest: str, pubkey: str) -> bool: +def verify_signature(signature: dict, image_digest: str, pubkey: str | Path) -> bool: """Verify a signature against a given public key""" # XXX - Also verfy the identity/docker-reference field against the expected value # e.g. 
ghcr.io/freedomofpress/dangerzone/dangerzone cosign.ensure_installed() signature_bundle = signature_to_bundle(signature) - - payload_bytes = b64decode(signature_bundle["Payload"]) - payload_digest = json.loads(payload_bytes)["critical"]["image"][ - "docker-manifest-digest" - ] + try: + payload_bytes = b64decode(signature_bundle["Payload"]) + payload_digest = json.loads(payload_bytes)["critical"]["image"][ + "docker-manifest-digest" + ] + except Exception as e: + raise errors.SignatureVerificationError( + f"Unable to extract the payload digest from the signature: {e}" + ) if payload_digest != f"sha256:{image_digest}": raise errors.SignatureMismatch( f"The signature does not match the image digest ({payload_digest}, {image_digest})" @@ -87,11 +91,14 @@ def verify_signature(signature: dict, image_digest: str, pubkey: str) -> bool: payload_file.write(payload_bytes) payload_file.flush() + if isinstance(pubkey, str): + pubkey = Path(pubkey) + cmd = [ "cosign", "verify-blob", "--key", - pubkey, + str(pubkey.absolute()), "--bundle", signature_file.name, payload_file.name, @@ -124,9 +131,14 @@ def verify_signatures( image_digest: str, pubkey: str, ) -> bool: + if len(signatures) < 1: + raise errors.SignatureVerificationError("No signatures found") + for signature in signatures: if not verify_signature(signature, image_digest, pubkey): - raise errors.SignatureVerificationError() + msg = f"Unable to verify signature for {image_digest} with pubkey {pubkey}" + raise errors.SignatureVerificationError(msg) + return True @@ -140,9 +152,14 @@ def get_last_log_index() -> int: def get_log_index_from_signatures(signatures: List[Dict]) -> int: - return reduce( - lambda acc, sig: max(acc, sig["Bundle"]["Payload"]["logIndex"]), signatures, 0 - ) + def _reducer(accumulator: int, signature: Dict) -> int: + try: + logIndex = int(signature["Bundle"]["Payload"]["logIndex"]) + except (KeyError, ValueError): + return accumulator + return max(accumulator, logIndex) + + return reduce(_reducer, 
signatures, 0) def write_log_index(log_index: int) -> None: @@ -278,13 +295,21 @@ def get_file_digest(file: Optional[str] = None, content: Optional[bytes] = None) return "" -def load_signatures(image_digest: str, pubkey: str) -> List[Dict]: +def load_and_verify_signatures( + image_digest: str, + pubkey: str, + bypass_verification: bool = False, + signatures_path: Optional[Path] = None, +) -> List[Dict]: """ Load signatures from the local filesystem See store_signatures() for the expected format. """ - pubkey_signatures = SIGNATURES_PATH / get_file_digest(pubkey) + if not signatures_path: + signatures_path = SIGNATURES_PATH + + pubkey_signatures = signatures_path / get_file_digest(pubkey) if not pubkey_signatures.exists(): msg = ( f"Cannot find a '{pubkey_signatures}' folder." @@ -294,7 +319,12 @@ def load_signatures(image_digest: str, pubkey: str) -> List[Dict]: with open(pubkey_signatures / f"{image_digest}.json") as f: log.debug("Loading signatures from %s", f.name) - return json.load(f) + signatures = json.load(f) + + if not bypass_verification: + verify_signatures(signatures, image_digest, pubkey) + + return signatures def store_signatures(signatures: list[Dict], image_digest: str, pubkey: str) -> None: @@ -352,32 +382,27 @@ def verify_local_image(image: str, pubkey: str) -> bool: raise errors.ImageNotFound(f"The image {image} does not exist locally") log.debug(f"Image digest: {image_digest}") - signatures = load_signatures(image_digest, pubkey) - if len(signatures) < 1: - raise errors.LocalSignatureNotFound("No signatures found") - - for signature in signatures: - if not verify_signature(signature, image_digest, pubkey): - msg = f"Unable to verify signature for {image} with pubkey {pubkey}" - raise errors.SignatureVerificationError(msg) + load_and_verify_signatures(image_digest, pubkey) return True def get_remote_signatures(image: str, digest: str) -> List[Dict]: - """Retrieve the signatures from the registry, via `cosign download`.""" + """Retrieve the 
signatures from the registry, via `cosign download signatures`.""" cosign.ensure_installed() - # XXX: try/catch here - process = subprocess.run( - ["cosign", "download", "signature", f"{image}@sha256:{digest}"], - capture_output=True, - check=True, - ) + try: + process = subprocess.run( + ["cosign", "download", "signature", f"{image}@sha256:{digest}"], + capture_output=True, + check=True, + ) + except subprocess.CalledProcessError as e: + raise errors.NoRemoteSignatures(e) - # XXX: Check the output first. # Remove the last return, split on newlines, convert from JSON signatures_raw = process.stdout.decode("utf-8").strip().split("\n") signatures = list(map(json.loads, signatures_raw)) + breakpoint() if len(signatures) < 1: raise errors.NoRemoteSignatures("No signatures found for the image") return signatures @@ -390,8 +415,8 @@ def prepare_airgapped_archive(image_name: str, destination: str) -> None: ) cosign.ensure_installed() - # Get the image from the registry + # Get the image from the registry with TemporaryDirectory() as tmpdir: msg = f"Downloading image {image_name}. \nIt might take a while." log.info(msg) diff --git a/tests/assets/signatures/README.md b/tests/assets/signatures/README.md new file mode 100644 index 0000000..e79adbc --- /dev/null +++ b/tests/assets/signatures/README.md @@ -0,0 +1,7 @@ +This folder contains signature-folders used for the testing the signatures implementation. + +The following folders are used: + +- `valid`: this folder contains signatures which should be considered valid and generated with the key available at `tests/assets/test.pub.key` +- `invalid`: this folder contains signatures which should be considered invalid, because their format doesn't match the expected one. e.g. it uses plain text instead of base64-encoded text. +- `tempered`: This folder contain signatures which have been tempered-with. The goal is to have signatures that looks valid, but actually aren't. 
diff --git a/tests/assets/signatures/invalid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/19e8eacd75879d05f6621c2ea8dd955e68ee3e07b41b9d53f4c8cc9929a68a67.json b/tests/assets/signatures/invalid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/19e8eacd75879d05f6621c2ea8dd955e68ee3e07b41b9d53f4c8cc9929a68a67.json new file mode 100644 index 0000000..8ff0ba9 --- /dev/null +++ b/tests/assets/signatures/invalid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/19e8eacd75879d05f6621c2ea8dd955e68ee3e07b41b9d53f4c8cc9929a68a67.json @@ -0,0 +1,18 @@ +[ + { + "Base64Signature": "Invalid base64 signature", + "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1NjoxOWU4ZWFjZDc1ODc5ZDA1ZjY2MjFjMmVhOGRkOTU1ZTY4ZWUzZTA3YjQxYjlkNTNmNGM4Y2M5OTI5YTY4YTY3In0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=", + "Cert": null, + "Chain": null, + "Bundle": { + "SignedEntryTimestamp": "MEUCIC9oXH9VVP96frVOmDw704FBqMN/Bpm2RMdTm6BtSwL/AiEA6mCIjhV65fYuy4CwjsIzQHi/oW6IBwtd6oCvN2dI6HQ=", + "Payload": { + "body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiJmMjEwNDJjY2RjOGU0ZjA1ZGEzNmE5ZjU4ODg5MmFlZGRlMzYzZTQ2ZWNjZGZjM2MyNzAyMTkwZDU0YTdmZmVlIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FWUNJUUNWaTJGUFI3Mjl1aHAvY3JFdUNTOW9yQzRhMnV0OHN3dDdTUnZXYUVSTGp3SWhBSlM1dzU3MHhsQnJsM2Nhd1Y1akQ1dk85RGh1dkNrdCtzOXJLdGc2NzVKQSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", + "integratedTime": 1738752154, + "logIndex": 
168898587, + "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d" + } + }, + "RFC3161Timestamp": null + } +] \ No newline at end of file diff --git a/tests/assets/signatures/invalid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/220b52200e3e47b1b42010667fcaa9338681e64dd3e34a34873866cb051d694e.json b/tests/assets/signatures/invalid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/220b52200e3e47b1b42010667fcaa9338681e64dd3e34a34873866cb051d694e.json new file mode 100644 index 0000000..34ff6e4 --- /dev/null +++ b/tests/assets/signatures/invalid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/220b52200e3e47b1b42010667fcaa9338681e64dd3e34a34873866cb051d694e.json @@ -0,0 +1,18 @@ +[ + { + "Base64Signature": "MEQCICi2AOAJbS1k3334VMSo+qxaI4f5VoNnuVExZ4tfIu7rAiAiwuKdo8rGfFMGMLSFSQvoLF3JuwFy4JtNW6kQlwH7vg==", + "Payload": "Invalid base64 payload", + "Cert": null, + "Chain": null, + "Bundle": { + "SignedEntryTimestamp": "MEUCIEvx6NtFeAag9TplqMLjVczT/tC6lpKe9SnrxbehBlxfAiEA07BE3f5JsMLsUsmHD58D6GaZr2yz+yQ66Os2ps8oKz8=", + "Payload": { + "body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiI4YmJmNGRiNjBmMmExM2IyNjI2NTI3MzljNWM5ZTYwNjNiMDYyNjVlODU1Zjc3MTdjMTdlYWY4YzViZTQyYWUyIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FUUNJQ2kyQU9BSmJTMWszMzM0Vk1TbytxeGFJNGY1Vm9ObnVWRXhaNHRmSXU3ckFpQWl3dUtkbzhyR2ZGTUdNTFNGU1F2b0xGM0p1d0Z5NEp0Tlc2a1Fsd0g3dmc9PSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", + "integratedTime": 1738859497, + "logIndex": 169356501, + "logID": 
"c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d" + } + }, + "RFC3161Timestamp": null + } +] \ No newline at end of file diff --git a/tests/assets/signatures/invalid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/4da441235e84e93518778827a5c5745d532d7a4079886e1647924bee7ef1c14d.json b/tests/assets/signatures/invalid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/4da441235e84e93518778827a5c5745d532d7a4079886e1647924bee7ef1c14d.json new file mode 100644 index 0000000..15e9fae --- /dev/null +++ b/tests/assets/signatures/invalid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/4da441235e84e93518778827a5c5745d532d7a4079886e1647924bee7ef1c14d.json @@ -0,0 +1,18 @@ +[ + { + "Base64Signature": "MEQCIDJxvB7lBU+VNYBD0xw/3Bi8wY7GPJ2fBP7mUFbguApoAiAIpuQT+sgatOY6yXkkA8K/sM40d5/gt7jQywWPbq5+iw==", + "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hcHlyZ2lvL2RhbmdlcnpvbmUvZGFuZ2Vyem9uZSJ9LCJpbWFnZSI6eyJkb2NrZXItbWFuaWZlc3QtZGlnZXN0Ijoic2hhMjU2OjRkYTQ0MTIzNWU4NGU5MzUxODc3ODgyN2E1YzU3NDVkNTMyZDdhNDA3OTg4NmUxNjQ3OTI0YmVlN2VmMWMxNGQifSwidHlwZSI6ImNvc2lnbiBjb250YWluZXIgaW1hZ2Ugc2lnbmF0dXJlIn0sIm9wdGlvbmFsIjpudWxsfQ==", + "Cert": null, + "Chain": null, + "Bundle": { + "SignedEntryTimestamp": "Invalid signed entry timestamp", + "Payload": { + "body": 
"eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiIyMGE2ZDU1NTk4Y2U0NjU3NWZkZjViZGU3YzhhYWE2YTU2ZjZlMGRmOWNiYTY1MTJhMDAxODhjMTU1NGIzYjE3In19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FUUNJREp4dkI3bEJVK1ZOWUJEMHh3LzNCaTh3WTdHUEoyZkJQN21VRmJndUFwb0FpQUlwdVFUK3NnYXRPWTZ5WGtrQThLL3NNNDBkNS9ndDdqUXl3V1BicTUraXc9PSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", + "integratedTime": 1738688492, + "logIndex": 168652066, + "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d" + } + }, + "RFC3161Timestamp": null + } +] \ No newline at end of file diff --git a/tests/assets/signatures/invalid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/7b21dbdebffed855621dfcdeaa52230dc6566997f852ef5d62b0338b46796e01.json b/tests/assets/signatures/invalid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/7b21dbdebffed855621dfcdeaa52230dc6566997f852ef5d62b0338b46796e01.json new file mode 100644 index 0000000..9594f7f --- /dev/null +++ b/tests/assets/signatures/invalid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/7b21dbdebffed855621dfcdeaa52230dc6566997f852ef5d62b0338b46796e01.json @@ -0,0 +1,18 @@ +[ + { + "Base64Signature": "MEUCIQC2WlJH+B8VuX1c6i4sDwEGEZc53hXUD6/ds9TMJ3HrfwIgCxSnrNYRD2c8XENqfqc+Ik1gx0DK9kPNsn/Lt8V/dCo=", + "Payload": 
"eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1Njo3YjIxZGJkZWJmZmVkODU1NjIxZGZjZGVhYTUyMjMwZGM2NTY2OTk3Zjg1MmVmNWQ2MmIwMzM4YjQ2Nzk2ZTAxIn0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=", + "Cert": null, + "Chain": null, + "Bundle": { + "SignedEntryTimestamp": "MEYCIQDn04gOHqiZcwUO+NVV9+29+abu6O/k1ve9zatJ3gVu9QIhAJL3E+mqVPdMPfMSdhHt2XDQsYzfRDDJNJEABQlbV3Jg", + "Payload": { + "body": "Invalid bundle payload body", + "integratedTime": 1738862352, + "logIndex": 169369149, + "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d" + } + }, + "RFC3161Timestamp": null + } +] \ No newline at end of file diff --git a/tests/assets/signatures/tempered/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/19e8eacd75879d05f6621c2ea8dd955e68ee3e07b41b9d53f4c8cc9929a68a67.json b/tests/assets/signatures/tempered/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/19e8eacd75879d05f6621c2ea8dd955e68ee3e07b41b9d53f4c8cc9929a68a67.json new file mode 100644 index 0000000..54a49bf --- /dev/null +++ b/tests/assets/signatures/tempered/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/19e8eacd75879d05f6621c2ea8dd955e68ee3e07b41b9d53f4c8cc9929a68a67.json @@ -0,0 +1,18 @@ +[ + { + "Base64Signature": "MAIhAJWLYU9Hvb26Gn9ysS4JL2isLhra63yzC3tJG9ZoREuPAiEAlLnDnvTGUGuXdxrBXmMPm870OG68KS36z2sq2DrvkkAK", + "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1NjoxOWU4ZWFjZDc1ODc5ZDA1ZjY2MjFjMmVhOGRkOTU1ZTY4ZWUzZTA3YjQxYjlkNTNmNGM4Y2M5OTI5YTY4YTY3In0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=", + "Cert": null, + "Chain": null, + "Bundle": { + "SignedEntryTimestamp": 
"MEUCIC9oXH9VVP96frVOmDw704FBqMN/Bpm2RMdTm6BtSwL/AiEA6mCIjhV65fYuy4CwjsIzQHi/oW6IBwtd6oCvN2dI6HQ=", + "Payload": { + "body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiJmMjEwNDJjY2RjOGU0ZjA1ZGEzNmE5ZjU4ODg5MmFlZGRlMzYzZTQ2ZWNjZGZjM2MyNzAyMTkwZDU0YTdmZmVlIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FWUNJUUNWaTJGUFI3Mjl1aHAvY3JFdUNTOW9yQzRhMnV0OHN3dDdTUnZXYUVSTGp3SWhBSlM1dzU3MHhsQnJsM2Nhd1Y1akQ1dk85RGh1dkNrdCtzOXJLdGc2NzVKQSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", + "integratedTime": 1738752154, + "logIndex": 168898587, + "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d" + } + }, + "RFC3161Timestamp": null + } +] \ No newline at end of file diff --git a/tests/assets/signatures/tempered/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/220b52200e3e47b1b42010667fcaa9338681e64dd3e34a34873866cb051d694e.json b/tests/assets/signatures/tempered/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/220b52200e3e47b1b42010667fcaa9338681e64dd3e34a34873866cb051d694e.json new file mode 100644 index 0000000..8bb1af4 --- /dev/null +++ b/tests/assets/signatures/tempered/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/220b52200e3e47b1b42010667fcaa9338681e64dd3e34a34873866cb051d694e.json @@ -0,0 +1,18 @@ +[ + { + "Base64Signature": "MEQCICi2AOAJbS1k3334VMSo+qxaI4f5VoNnuVExZ4tfIu7rAiAiwuKdo8rGfFMGMLSFSQvoLF3JuwFy4JtNW6kQlwH7vg==", + "Payload": 
"eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9oNHh4MHIvZGFuZ2Vyem9uZS9kYW5nZXJ6b25lIn0sImltYWdlIjp7ImRvY2tlci1tYW5pZmVzdC1kaWdlc3QiOiJzaGEyNTY6MjIwYjUyMjAwZTNlNDdiMWI0MjAxMDY2N2ZjYWE5MzM4NjgxZTY0ZGQzZTM0YTM0ODczODY2Y2IwNTFkNjk0ZSJ9LCJ0eXBlIjoiY29zaWduIGNvbnRhaW5lciBpbWFnZSBzaWduYXR1cmUifSwib3B0aW9uYWwiOm51bGx9Cg==", + "Cert": null, + "Chain": null, + "Bundle": { + "SignedEntryTimestamp": "MEUCIEvx6NtFeAag9TplqMLjVczT/tC6lpKe9SnrxbehBlxfAiEA07BE3f5JsMLsUsmHD58D6GaZr2yz+yQ66Os2ps8oKz8=", + "Payload": { + "body": "eyJhcGlWZXJzaW9uIjoiNi42LjYiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiI4YmJmNGRiNjBmMmExM2IyNjI2NTI3MzljNWM5ZTYwNjNiMDYyNjVlODU1Zjc3MTdjMTdlYWY4YzViZTQyYWUyIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FUUNJQ2kyQU9BSmJTMWszMzM0Vk1TbytxeGFJNGY1Vm9ObnVWRXhaNHRmSXU3ckFpQWl3dUtkbzhyR2ZGTUdNTFNGU1F2b0xGM0p1d0Z5NEp0Tlc2a1Fsd0g3dmc9PSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0K", + "integratedTime": 1738859497, + "logIndex": 169356501, + "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d" + } + }, + "RFC3161Timestamp": null + } +] \ No newline at end of file diff --git a/tests/assets/signatures/tempered/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/README.md b/tests/assets/signatures/tempered/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/README.md new file mode 100644 index 0000000..16819a4 --- /dev/null +++ b/tests/assets/signatures/tempered/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/README.md @@ -0,0 +1 @@ +This folder contain signatures which have been tempered-with. 
The goal is to have signatures that looks valid, but actually aren't. diff --git a/tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/19e8eacd75879d05f6621c2ea8dd955e68ee3e07b41b9d53f4c8cc9929a68a67.json b/tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/19e8eacd75879d05f6621c2ea8dd955e68ee3e07b41b9d53f4c8cc9929a68a67.json new file mode 100644 index 0000000..01db986 --- /dev/null +++ b/tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/19e8eacd75879d05f6621c2ea8dd955e68ee3e07b41b9d53f4c8cc9929a68a67.json @@ -0,0 +1 @@ +[{"Base64Signature": "MEYCIQCVi2FPR729uhp/crEuCS9orC4a2ut8swt7SRvWaERLjwIhAJS5w570xlBrl3cawV5jD5vO9DhuvCkt+s9rKtg675JA", "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1NjoxOWU4ZWFjZDc1ODc5ZDA1ZjY2MjFjMmVhOGRkOTU1ZTY4ZWUzZTA3YjQxYjlkNTNmNGM4Y2M5OTI5YTY4YTY3In0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=", "Cert": null, "Chain": null, "Bundle": {"SignedEntryTimestamp": "MEUCIC9oXH9VVP96frVOmDw704FBqMN/Bpm2RMdTm6BtSwL/AiEA6mCIjhV65fYuy4CwjsIzQHi/oW6IBwtd6oCvN2dI6HQ=", "Payload": {"body": 
"eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiJmMjEwNDJjY2RjOGU0ZjA1ZGEzNmE5ZjU4ODg5MmFlZGRlMzYzZTQ2ZWNjZGZjM2MyNzAyMTkwZDU0YTdmZmVlIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FWUNJUUNWaTJGUFI3Mjl1aHAvY3JFdUNTOW9yQzRhMnV0OHN3dDdTUnZXYUVSTGp3SWhBSlM1dzU3MHhsQnJsM2Nhd1Y1akQ1dk85RGh1dkNrdCtzOXJLdGc2NzVKQSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", "integratedTime": 1738752154, "logIndex": 168898587, "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"}}, "RFC3161Timestamp": null}] \ No newline at end of file diff --git a/tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/220b52200e3e47b1b42010667fcaa9338681e64dd3e34a34873866cb051d694e.json b/tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/220b52200e3e47b1b42010667fcaa9338681e64dd3e34a34873866cb051d694e.json new file mode 100644 index 0000000..8827c9c --- /dev/null +++ b/tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/220b52200e3e47b1b42010667fcaa9338681e64dd3e34a34873866cb051d694e.json @@ -0,0 +1 @@ +[{"Base64Signature": "MEQCICi2AOAJbS1k3334VMSo+qxaI4f5VoNnuVExZ4tfIu7rAiAiwuKdo8rGfFMGMLSFSQvoLF3JuwFy4JtNW6kQlwH7vg==", "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1NjoyMjBiNTIyMDBlM2U0N2IxYjQyMDEwNjY3ZmNhYTkzMzg2ODFlNjRkZDNlMzRhMzQ4NzM4NjZjYjA1MWQ2OTRlIn0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=", 
"Cert": null, "Chain": null, "Bundle": {"SignedEntryTimestamp": "MEUCIEvx6NtFeAag9TplqMLjVczT/tC6lpKe9SnrxbehBlxfAiEA07BE3f5JsMLsUsmHD58D6GaZr2yz+yQ66Os2ps8oKz8=", "Payload": {"body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiI4YmJmNGRiNjBmMmExM2IyNjI2NTI3MzljNWM5ZTYwNjNiMDYyNjVlODU1Zjc3MTdjMTdlYWY4YzViZTQyYWUyIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FUUNJQ2kyQU9BSmJTMWszMzM0Vk1TbytxeGFJNGY1Vm9ObnVWRXhaNHRmSXU3ckFpQWl3dUtkbzhyR2ZGTUdNTFNGU1F2b0xGM0p1d0Z5NEp0Tlc2a1Fsd0g3dmc9PSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", "integratedTime": 1738859497, "logIndex": 169356501, "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"}}, "RFC3161Timestamp": null}] \ No newline at end of file diff --git a/tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/4da441235e84e93518778827a5c5745d532d7a4079886e1647924bee7ef1c14d.json b/tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/4da441235e84e93518778827a5c5745d532d7a4079886e1647924bee7ef1c14d.json new file mode 100644 index 0000000..fd13e9c --- /dev/null +++ b/tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/4da441235e84e93518778827a5c5745d532d7a4079886e1647924bee7ef1c14d.json @@ -0,0 +1 @@ +[{"Base64Signature": "MEQCIDJxvB7lBU+VNYBD0xw/3Bi8wY7GPJ2fBP7mUFbguApoAiAIpuQT+sgatOY6yXkkA8K/sM40d5/gt7jQywWPbq5+iw==", "Payload": 
"eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hcHlyZ2lvL2RhbmdlcnpvbmUvZGFuZ2Vyem9uZSJ9LCJpbWFnZSI6eyJkb2NrZXItbWFuaWZlc3QtZGlnZXN0Ijoic2hhMjU2OjRkYTQ0MTIzNWU4NGU5MzUxODc3ODgyN2E1YzU3NDVkNTMyZDdhNDA3OTg4NmUxNjQ3OTI0YmVlN2VmMWMxNGQifSwidHlwZSI6ImNvc2lnbiBjb250YWluZXIgaW1hZ2Ugc2lnbmF0dXJlIn0sIm9wdGlvbmFsIjpudWxsfQ==", "Cert": null, "Chain": null, "Bundle": {"SignedEntryTimestamp": "MEYCIQDuuuHoyZ2i4HKxik4Ju/MWkELwc1w5SfzcpCV7G+vZHAIhAO25R/+lIfQ/kMfC4PfeoWDwLpvnH9cq6dVSzl12i1su", "Payload": {"body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiIyMGE2ZDU1NTk4Y2U0NjU3NWZkZjViZGU3YzhhYWE2YTU2ZjZlMGRmOWNiYTY1MTJhMDAxODhjMTU1NGIzYjE3In19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FUUNJREp4dkI3bEJVK1ZOWUJEMHh3LzNCaTh3WTdHUEoyZkJQN21VRmJndUFwb0FpQUlwdVFUK3NnYXRPWTZ5WGtrQThLL3NNNDBkNS9ndDdqUXl3V1BicTUraXc9PSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", "integratedTime": 1738688492, "logIndex": 168652066, "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"}}, "RFC3161Timestamp": null}] \ No newline at end of file diff --git a/tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/7b21dbdebffed855621dfcdeaa52230dc6566997f852ef5d62b0338b46796e01.json b/tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/7b21dbdebffed855621dfcdeaa52230dc6566997f852ef5d62b0338b46796e01.json new file mode 100644 index 0000000..e857c4b --- /dev/null +++ 
b/tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/7b21dbdebffed855621dfcdeaa52230dc6566997f852ef5d62b0338b46796e01.json @@ -0,0 +1 @@ +[{"Base64Signature": "MEUCIQC2WlJH+B8VuX1c6i4sDwEGEZc53hXUD6/ds9TMJ3HrfwIgCxSnrNYRD2c8XENqfqc+Ik1gx0DK9kPNsn/Lt8V/dCo=", "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1Njo3YjIxZGJkZWJmZmVkODU1NjIxZGZjZGVhYTUyMjMwZGM2NTY2OTk3Zjg1MmVmNWQ2MmIwMzM4YjQ2Nzk2ZTAxIn0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=", "Cert": null, "Chain": null, "Bundle": {"SignedEntryTimestamp": "MEYCIQDn04gOHqiZcwUO+NVV9+29+abu6O/k1ve9zatJ3gVu9QIhAJL3E+mqVPdMPfMSdhHt2XDQsYzfRDDJNJEABQlbV3Jg", "Payload": {"body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiIzZWQwNWJlYTc2ZWFmMzBmYWM1NzBlNzhlODBlZmQxNDNiZWQxNzFjM2VjMDY5MWI2MDU3YjdhMDAzNGEyMzhlIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FVUNJUUMyV2xKSCtCOFZ1WDFjNmk0c0R3RUdFWmM1M2hYVUQ2L2RzOVRNSjNIcmZ3SWdDeFNuck5ZUkQyYzhYRU5xZnFjK0lrMWd4MERLOWtQTnNuL0x0OFYvZENvPSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", "integratedTime": 1738862352, "logIndex": 169369149, "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"}}, "RFC3161Timestamp": null}] \ No newline at end of file diff --git a/tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/fa948726aac29a6ac49f01ec8fbbac18522b35b2491fdf716236a0b3502a2ca7.json 
b/tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/fa948726aac29a6ac49f01ec8fbbac18522b35b2491fdf716236a0b3502a2ca7.json new file mode 100644 index 0000000..660dbbf --- /dev/null +++ b/tests/assets/signatures/valid/95b432860b272938246b10e1cfc89a24e1db352b3aebaa799c4284c42c46bd95/fa948726aac29a6ac49f01ec8fbbac18522b35b2491fdf716236a0b3502a2ca7.json @@ -0,0 +1 @@ +[{"Base64Signature": "MEQCIHqXEMuAmt1pFCsHC71+ejlG5kjKrf1+AQW202OY3vhsAiA0BoDAVgAk9K7SgIRBpIV6u0veyB1iypzV0DteNh3IoQ==", "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1NjpmYTk0ODcyNmFhYzI5YTZhYzQ5ZjAxZWM4ZmJiYWMxODUyMmIzNWIyNDkxZmRmNzE2MjM2YTBiMzUwMmEyY2E3In0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=", "Cert": null, "Chain": null, "Bundle": {"SignedEntryTimestamp": "MEUCIQCrZ+2SSYdpIOEbyUXXaBxeqT8RTujpqdXipls9hmNvDgIgdWV84PiCY2cI49QjHjun7lj25/znGMDiwjCuPjIPA6Q=", "Payload": {"body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiI5ZjcwM2I4NTM4MjM4N2U2OTgwNzYxNDg1YzU0NGIzNmJmMThmNTA5ODQwMTMxYzRmOTJhMjE4OTI3MTJmNDJmIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FUUNJSHFYRU11QW10MXBGQ3NIQzcxK2VqbEc1a2pLcmYxK0FRVzIwMk9ZM3Zoc0FpQTBCb0RBVmdBazlLN1NnSVJCcElWNnUwdmV5QjFpeXB6VjBEdGVOaDNJb1E9PSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", "integratedTime": 1737478056, "logIndex": 164177381, "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"}}, "RFC3161Timestamp": null}, {"Base64Signature": 
"MEYCIQDg8MeymBLOn+Khue0yK1yQy4Fu/+GXmyC/xezXO/p1JgIhAN6QLojKzkZGxyYirbqRbZCVcIM4YN3Y18FXwpW4RuUy", "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1NjpmYTk0ODcyNmFhYzI5YTZhYzQ5ZjAxZWM4ZmJiYWMxODUyMmIzNWIyNDkxZmRmNzE2MjM2YTBiMzUwMmEyY2E3In0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=", "Cert": null, "Chain": null, "Bundle": {"SignedEntryTimestamp": "MEUCIQCQLlrH2xo/bA6r386vOwA0OjUe0TqcxROT/Wo220jvGgIgPgRlKnQxWoXlD/Owf1Ogk5XlfXAt2f416LDbk4AoEvk=", "Payload": {"body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiI5ZjcwM2I4NTM4MjM4N2U2OTgwNzYxNDg1YzU0NGIzNmJmMThmNTA5ODQwMTMxYzRmOTJhMjE4OTI3MTJmNDJmIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FWUNJUURnOE1leW1CTE9uK0todWUweUsxeVF5NEZ1LytHWG15Qy94ZXpYTy9wMUpnSWhBTjZRTG9qS3prWkd4eVlpcmJxUmJaQ1ZjSU00WU4zWTE4Rlh3cFc0UnVVeSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", "integratedTime": 1737557525, "logIndex": 164445483, "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"}}, "RFC3161Timestamp": null}, {"Base64Signature": "MEQCIEhUVYVW6EdovGDSSZt1Ffc86OfzEKAas94M4eFK7hoFAiA4+6219LktmgJSKuc2ObsnL5QjHyNLk58BwY0s8gBHbQ==", "Payload": 
"eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1NjpmYTk0ODcyNmFhYzI5YTZhYzQ5ZjAxZWM4ZmJiYWMxODUyMmIzNWIyNDkxZmRmNzE2MjM2YTBiMzUwMmEyY2E3In0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=", "Cert": null, "Chain": null, "Bundle": {"SignedEntryTimestamp": "MEQCIDRUTMwL+/eW79ARRLE8h/ByCrvo0rOn3vUYQg1E6KIBAiBi/bzoqcL2Ik27KpwfFosww4l7yI+9IqwCvUlkQgEB7g==", "Payload": {"body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiI5ZjcwM2I4NTM4MjM4N2U2OTgwNzYxNDg1YzU0NGIzNmJmMThmNTA5ODQwMTMxYzRmOTJhMjE4OTI3MTJmNDJmIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FUUNJRWhVVllWVzZFZG92R0RTU1p0MUZmYzg2T2Z6RUtBYXM5NE00ZUZLN2hvRkFpQTQrNjIxOUxrdG1nSlNLdWMyT2Jzbkw1UWpIeU5MazU4QndZMHM4Z0JIYlE9PSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", "integratedTime": 1737567664, "logIndex": 164484602, "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"}}, "RFC3161Timestamp": null}] \ No newline at end of file diff --git a/tests/assets/test.pub.key b/tests/assets/test.pub.key new file mode 100644 index 0000000..a36dd82 --- /dev/null +++ b/tests/assets/test.pub.key @@ -0,0 +1,4 @@ +-----BEGIN PUBLIC KEY----- +MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEoE0CXLGff79fR8KyPnSvOY74UBkt +2sLi+aVFUzS1Qwt4wosxHhcDN2B6QSsLlvgsH82q6qcA6PL2SdS/p4jWGA== +-----END PUBLIC KEY----- diff --git a/tests/test_signatures.py b/tests/test_signatures.py new file mode 100644 index 0000000..8eafa9c --- /dev/null +++ b/tests/test_signatures.py @@ -0,0 +1,221 @@ +import json 
+import unittest +from pathlib import Path + +import pytest +from pytest_subprocess import FakeProcess + +from dangerzone.updater import errors +from dangerzone.updater.signatures import ( + get_config_dir, + get_last_log_index, + get_log_index_from_signatures, + get_remote_signatures, + is_update_available, + load_and_verify_signatures, + prepare_airgapped_archive, + store_signatures, + upgrade_container_image, + verify_local_image, + verify_signature, + verify_signatures, +) + +ASSETS_PATH = Path(__file__).parent / "assets" +TEST_PUBKEY_PATH = ASSETS_PATH / "test.pub.key" +INVALID_SIGNATURES_PATH = ASSETS_PATH / "signatures" / "invalid" +VALID_SIGNATURES_PATH = ASSETS_PATH / "signatures" / "valid" +TEMPERED_SIGNATURES_PATH = ASSETS_PATH / "signatures" / "tempered" + + +def test_load_valid_signatures(mocker): + mocker.patch("dangerzone.updater.signatures.SIGNATURES_PATH", VALID_SIGNATURES_PATH) + valid_signatures = list(VALID_SIGNATURES_PATH.glob("**/*.json")) + assert len(valid_signatures) > 0 + for file in valid_signatures: + signatures = load_and_verify_signatures(file.stem, TEST_PUBKEY_PATH) + assert isinstance(signatures, list) + assert len(signatures) > 0 + + +def test_load_invalid_signatures(mocker): + mocker.patch( + "dangerzone.updater.signatures.SIGNATURES_PATH", INVALID_SIGNATURES_PATH + ) + invalid_signatures = list(INVALID_SIGNATURES_PATH.glob("**/*.json")) + assert len(invalid_signatures) > 0 + for file in invalid_signatures: + with pytest.raises(errors.SignatureError): + load_and_verify_signatures(file.stem, TEST_PUBKEY_PATH) + + +def test_load_tempered_signatures(mocker): + mocker.patch( + "dangerzone.updater.signatures.SIGNATURES_PATH", TEMPERED_SIGNATURES_PATH + ) + tempered_signatures = list(TEMPERED_SIGNATURES_PATH.glob("**/*.json")) + assert len(tempered_signatures) > 0 + for file in tempered_signatures: + with pytest.raises(errors.SignatureError): + load_and_verify_signatures(file.stem, TEST_PUBKEY_PATH) + + +def 
test_get_log_index_from_signatures(): + signatures = [{"Bundle": {"Payload": {"logIndex": 1}}}] + assert get_log_index_from_signatures(signatures) == 1 + + +def test_get_log_index_from_signatures_empty(): + signatures = [] + assert get_log_index_from_signatures(signatures) == 0 + + +def test_get_log_index_from_malformed_signatures(): + signatures = [{"Bundle": {"Payload": {"logIndex": "foo"}}}] + assert get_log_index_from_signatures(signatures) == 0 + + +def test_get_log_index_from_missing_log_index(): + signatures = [{"Bundle": {"Payload": {}}}] + assert get_log_index_from_signatures(signatures) == 0 + + +def test_upgrade_container_image_if_already_up_to_date(mocker): + mocker.patch( + "dangerzone.updater.signatures.is_update_available", return_value=(False, None) + ) + with pytest.raises(errors.ImageAlreadyUpToDate): + upgrade_container_image( + "ghcr.io/freedomofpress/dangerzone/dangerzone", "sha256:123456", "test.pub" + ) + + +def test_upgrade_container_without_signatures(mocker): + mocker.patch( + "dangerzone.updater.signatures.is_update_available", + return_value=(True, "sha256:123456"), + ) + mocker.patch("dangerzone.updater.signatures.get_remote_signatures", return_value=[]) + with pytest.raises(errors.SignatureVerificationError): + upgrade_container_image( + "ghcr.io/freedomofpress/dangerzone/dangerzone", + "sha256:123456", + "test.pub", + ) + + +def test_upgrade_container_lower_log_index(mocker): + image_digest = "4da441235e84e93518778827a5c5745d532d7a4079886e1647924bee7ef1c14d" + signatures = load_and_verify_signatures( + image_digest, + TEST_PUBKEY_PATH, + bypass_verification=True, + signatures_path=VALID_SIGNATURES_PATH, + ) + mocker.patch( + "dangerzone.updater.signatures.is_update_available", + return_value=( + True, + image_digest, + ), + ) + mocker.patch( + "dangerzone.updater.signatures.get_remote_signatures", + return_value=signatures, + ) + # Mock to avoid loosing time on test failures + mocker.patch("dangerzone.container_utils.container_pull") 
+ # The log index of the incoming signatures is 168652066 + mocker.patch( + "dangerzone.updater.signatures.get_last_log_index", + return_value=168652067, + ) + + with pytest.raises(errors.InvalidLogIndex): + upgrade_container_image( + "ghcr.io/freedomofpress/dangerzone/dangerzone", + image_digest, + TEST_PUBKEY_PATH, + ) + + +def test_prepare_airgapped_archive_requires_digest(): + with pytest.raises(errors.AirgappedImageDownloadError): + prepare_airgapped_archive( + "ghcr.io/freedomofpress/dangerzone/dangerzone", "test.tar" + ) + + +def test_get_remote_signatures_error(fp: FakeProcess, mocker): + image = "ghcr.io/freedomofpress/dangerzone/dangerzone" + digest = "123456" + mocker.patch("dangerzone.updater.cosign.ensure_installed", return_value=True) + fp.register_subprocess( + ["cosign", "download", "signature", f"{image}@sha256:{digest}"], returncode=1 + ) + with pytest.raises(errors.NoRemoteSignatures): + get_remote_signatures(image, digest) + + +def test_get_remote_signatures_empty(fp: FakeProcess, mocker): + image = "ghcr.io/freedomofpress/dangerzone/dangerzone" + digest = "123456" + mocker.patch("dangerzone.updater.cosign.ensure_installed", return_value=True) + fp.register_subprocess( + ["cosign", "download", "signature", f"{image}@sha256:{digest}"], + stdout=json.dumps([]), + ) + with pytest.raises(errors.NoRemoteSignatures): + get_remote_signatures(image, digest) + + +def test_get_remote_signatures_cosign_error(): + pass + + +def test_verify_local_image_no_signatures(): + pass + + +def test_verify_local_image_invalid_signatures(): + pass + + +def test_verify_local_image(): + pass + + +def test_store_signatures_with_different_digests(): + pass + + +def test_store_signatures_digest_mismatch(): + pass + + +def test_stores_signatures_updates_last_log_index(): + pass + + +def test_get_file_digest(): + pass + + +def test_convert_oci_images_signatures(): + pass + + +def test_is_update_available_nothing_local(): + pass + + +def test_is_update_available_trims(): + 
pass + + +def test_verify_signature_wrong_payload_digest(): + pass + + +def test_verify_signatures_not_0(): + pass From 0f2d81dbd60e046b8e9b6ac063197751648a4108 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Thu, 13 Feb 2025 19:12:25 +0100 Subject: [PATCH 31/31] (WIP) some more tests --- dangerzone/updater/signatures.py | 3 +- tests/conftest.py | 12 ++++++++ tests/test_signatures.py | 53 +++++++++++++++++++++----------- 3 files changed, 48 insertions(+), 20 deletions(-) diff --git a/dangerzone/updater/signatures.py b/dangerzone/updater/signatures.py index a407c2b..2b3c676 100644 --- a/dangerzone/updater/signatures.py +++ b/dangerzone/updater/signatures.py @@ -401,8 +401,7 @@ def get_remote_signatures(image: str, digest: str) -> List[Dict]: # Remove the last return, split on newlines, convert from JSON signatures_raw = process.stdout.decode("utf-8").strip().split("\n") - signatures = list(map(json.loads, signatures_raw)) - breakpoint() + signatures = list(filter(bool, map(json.loads, signatures_raw))) if len(signatures) < 1: raise errors.NoRemoteSignatures("No signatures found for the image") return signatures diff --git a/tests/conftest.py b/tests/conftest.py index 3bef1af..4ac4aa0 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -13,6 +13,13 @@ from dangerzone.gui import Application sys.dangerzone_dev = True # type: ignore[attr-defined] +ASSETS_PATH = Path(__file__).parent / "assets" +TEST_PUBKEY_PATH = ASSETS_PATH / "test.pub.key" +INVALID_SIGNATURES_PATH = ASSETS_PATH / "signatures" / "invalid" +VALID_SIGNATURES_PATH = ASSETS_PATH / "signatures" / "valid" +TEMPERED_SIGNATURES_PATH = ASSETS_PATH / "signatures" / "tempered" + + # Use this fixture to make `pytest-qt` invoke our custom QApplication. 
# See https://pytest-qt.readthedocs.io/en/latest/qapplication.html#testing-custom-qapplications @pytest.fixture(scope="session") @@ -133,6 +140,11 @@ for_each_doc = pytest.mark.parametrize( ) +@pytest.fixture +def signature(): + return {} + + # External Docs - base64 docs encoded for externally sourced documents # XXX to reduce the chance of accidentally opening them test_docs_external_dir = Path(__file__).parent.joinpath(SAMPLE_EXTERNAL_DIRECTORY) diff --git a/tests/test_signatures.py b/tests/test_signatures.py index 8eafa9c..d62c670 100644 --- a/tests/test_signatures.py +++ b/tests/test_signatures.py @@ -28,6 +28,21 @@ VALID_SIGNATURES_PATH = ASSETS_PATH / "signatures" / "valid" TEMPERED_SIGNATURES_PATH = ASSETS_PATH / "signatures" / "tempered" +@pytest.fixture +def valid_signature(): + signature_file = next(VALID_SIGNATURES_PATH.glob("**/*.json")) + with open(signature_file, "r") as signature_file: + signatures = json.load(signature_file) + return signatures.pop() + + +@pytest.fixture +def signature_other_digest(valid_signature): + signature = valid_signature.copy() + signature["Bundle"]["Payload"]["digest"] = "sha256:123456" + return signature + + def test_load_valid_signatures(mocker): mocker.patch("dangerzone.updater.signatures.SIGNATURES_PATH", VALID_SIGNATURES_PATH) valid_signatures = list(VALID_SIGNATURES_PATH.glob("**/*.json")) @@ -163,29 +178,30 @@ def test_get_remote_signatures_empty(fp: FakeProcess, mocker): mocker.patch("dangerzone.updater.cosign.ensure_installed", return_value=True) fp.register_subprocess( ["cosign", "download", "signature", f"{image}@sha256:{digest}"], - stdout=json.dumps([]), + stdout=json.dumps({}), ) with pytest.raises(errors.NoRemoteSignatures): get_remote_signatures(image, digest) -def test_get_remote_signatures_cosign_error(): - pass +def test_get_remote_signatures_cosign_error(mocker, fp: FakeProcess): + image = "ghcr.io/freedomofpress/dangerzone/dangerzone" + digest = "123456" + 
mocker.patch("dangerzone.updater.cosign.ensure_installed", return_value=True) + fp.register_subprocess( + ["cosign", "download", "signature", f"{image}@sha256:{digest}"], + returncode=1, + stderr="Error: no signatures associated", + ) + with pytest.raises(errors.NoRemoteSignatures): + get_remote_signatures(image, digest) -def test_verify_local_image_no_signatures(): - pass - - -def test_verify_local_image_invalid_signatures(): - pass - - -def test_verify_local_image(): - pass - - -def test_store_signatures_with_different_digests(): +def test_store_signatures_with_different_digests( + valid_signature, signature_other_digest +): + signatures = [valid_signature, signature_other_digest] + breakpoint() pass @@ -217,5 +233,6 @@ def test_verify_signature_wrong_payload_digest(): pass -def test_verify_signatures_not_0(): - pass +def test_verify_signatures_empty_list(): + with pytest.raises(errors.SignatureVerificationError): + verify_signatures([], "1234", TEST_PUBKEY_PATH)