From 13449641ca730d608d37a5fac83b8ee32f5275e0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 27 Nov 2024 14:44:05 +0100 Subject: [PATCH 01/18] Build: Use Github runners to build and sign container images on new tags --- .github/workflows/release-container-image.yml | 56 +++++++++++++++++++ 1 file changed, 56 insertions(+) create mode 100644 .github/workflows/release-container-image.yml diff --git a/.github/workflows/release-container-image.yml b/.github/workflows/release-container-image.yml new file mode 100644 index 0000000..be05626 --- /dev/null +++ b/.github/workflows/release-container-image.yml @@ -0,0 +1,56 @@ +# This action listens on new tags, generates a new container image +# sign it and upload it to the container registry. + +name: Release container image +on: + push: + tags: + - "container-image/**" + branches: + - "test/image-**" + workflow_dispatch: + +permissions: + id-token: write + packages: write + contents: read + attestations: write + +env: + REGISTRY: ghcr.io/${{ github.repository_owner }} + REGISTRY_USER: ${{ github.actor }} + REGISTRY_PASSWORD: ${{ github.token }} + IMAGE_NAME: dangerzone/dangerzone + +jobs: + build-container-image: + runs-on: ubuntu-24.04 + steps: + - uses: actions/checkout@v4 + - name: Login to GitHub Container Registry + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: USERNAME + password: ${{ github.token }} + + - name: Build and push the dangerzone image + id: build-image + run: | + sudo apt-get install -y python3-poetry + python3 ./install/common/build-image.py + echo ${{ github.token }} | podman login ghcr.io -u USERNAME --password-stdin + + # Load the image with the final name directly + gunzip -c share/container.tar.gz | podman load + FINAL_IMAGE_NAME="${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}" + podman tag dangerzone.rocks/dangerzone "$FINAL_IMAGE_NAME" + podman push "$FINAL_IMAGE_NAME" --digestfile=digest + echo "digest=$(cat digest)" >> "$GITHUB_OUTPUT" 
+ + - name: Generate artifact attestation + uses: actions/attest-build-provenance@v1 + with: + subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + subject-digest: "${{ steps.build-image.outputs.digest }}" + push-to-registry: true From ac62a153dc72dd0cb1e5591fbc6e82fa26e9c20f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Mon, 20 Jan 2025 14:25:26 +0100 Subject: [PATCH 02/18] Checkout with depth:0 otherwise git commands aren't functional --- .github/workflows/release-container-image.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/release-container-image.yml b/.github/workflows/release-container-image.yml index be05626..9947284 100644 --- a/.github/workflows/release-container-image.yml +++ b/.github/workflows/release-container-image.yml @@ -27,6 +27,9 @@ jobs: runs-on: ubuntu-24.04 steps: - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Login to GitHub Container Registry uses: docker/login-action@v3 with: From 2a80bf0c2686904bd0f22c8332acf870995157e3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Mon, 20 Jan 2025 14:46:51 +0100 Subject: [PATCH 03/18] Get the tag from git before retagging it --- .github/workflows/release-container-image.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/release-container-image.yml b/.github/workflows/release-container-image.yml index 9947284..7177e93 100644 --- a/.github/workflows/release-container-image.yml +++ b/.github/workflows/release-container-image.yml @@ -47,7 +47,8 @@ jobs: # Load the image with the final name directly gunzip -c share/container.tar.gz | podman load FINAL_IMAGE_NAME="${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}" - podman tag dangerzone.rocks/dangerzone "$FINAL_IMAGE_NAME" + TAG=$(git describe --long --first-parent | tail -c +2) + podman tag dangerzone.rocks/dangerzone:$TAG "$FINAL_IMAGE_NAME" podman push "$FINAL_IMAGE_NAME" --digestfile=digest echo "digest=$(cat digest)" >> 
"$GITHUB_OUTPUT" From 891ffe4fec4c405f176ef214936b2bcc60d50f9a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Mon, 20 Jan 2025 15:16:13 +0100 Subject: [PATCH 04/18] Add the tag to the subject --- .github/workflows/release-container-image.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/release-container-image.yml b/.github/workflows/release-container-image.yml index 7177e93..752c27f 100644 --- a/.github/workflows/release-container-image.yml +++ b/.github/workflows/release-container-image.yml @@ -51,10 +51,11 @@ jobs: podman tag dangerzone.rocks/dangerzone:$TAG "$FINAL_IMAGE_NAME" podman push "$FINAL_IMAGE_NAME" --digestfile=digest echo "digest=$(cat digest)" >> "$GITHUB_OUTPUT" + echo "tag=$TAG" >> "$GITHUB_OUTPUT" - name: Generate artifact attestation uses: actions/attest-build-provenance@v1 with: - subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.build-image.outputs.tag }} subject-digest: "${{ steps.build-image.outputs.digest }}" push-to-registry: true From 554736cab3ef41dd5515254b4e185cea4d88cbd7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Mon, 20 Jan 2025 15:25:51 +0100 Subject: [PATCH 05/18] Remove the tag from the attestation, what we attest is the hash, so no need for it --- .github/workflows/release-container-image.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/release-container-image.yml b/.github/workflows/release-container-image.yml index 752c27f..7177e93 100644 --- a/.github/workflows/release-container-image.yml +++ b/.github/workflows/release-container-image.yml @@ -51,11 +51,10 @@ jobs: podman tag dangerzone.rocks/dangerzone:$TAG "$FINAL_IMAGE_NAME" podman push "$FINAL_IMAGE_NAME" --digestfile=digest echo "digest=$(cat digest)" >> "$GITHUB_OUTPUT" - echo "tag=$TAG" >> "$GITHUB_OUTPUT" - name: Generate artifact attestation uses: 
actions/attest-build-provenance@v1 with: - subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.build-image.outputs.tag }} + subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} subject-digest: "${{ steps.build-image.outputs.digest }}" push-to-registry: true From f0ac1f885f769592313bac71c1e582455b3294ca Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Mon, 20 Jan 2025 16:02:18 +0100 Subject: [PATCH 06/18] Add logs --- .github/workflows/release-container-image.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/release-container-image.yml b/.github/workflows/release-container-image.yml index 7177e93..0995261 100644 --- a/.github/workflows/release-container-image.yml +++ b/.github/workflows/release-container-image.yml @@ -29,6 +29,9 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 + - name: Check it's working + run: | + git describe --long --first-parent - name: Login to GitHub Container Registry uses: docker/login-action@v3 From 8f49cd99eb5f80213cfd37bd2bbc25c5f1097fc9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Mon, 20 Jan 2025 16:56:24 +0100 Subject: [PATCH 07/18] FIXUP: test --- .github/workflows/release-container-image.yml | 6 ------ 1 file changed, 6 deletions(-) diff --git a/.github/workflows/release-container-image.yml b/.github/workflows/release-container-image.yml index 0995261..13e0d00 100644 --- a/.github/workflows/release-container-image.yml +++ b/.github/workflows/release-container-image.yml @@ -1,6 +1,3 @@ -# This action listens on new tags, generates a new container image -# sign it and upload it to the container registry. 
- name: Release container image on: push: @@ -29,9 +26,6 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - - name: Check it's working - run: | - git describe --long --first-parent - name: Login to GitHub Container Registry uses: docker/login-action@v3 From 58176506331cf81517299df6e9d82e4566019a86 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 22 Jan 2025 15:21:10 +0100 Subject: [PATCH 08/18] Add a script to verify Github attestations --- dev_scripts/registry.py | 240 ++++++++++++++++++ .../independent-container-updates.md | 23 ++ 2 files changed, 263 insertions(+) create mode 100755 dev_scripts/registry.py create mode 100644 docs/developer/independent-container-updates.md diff --git a/dev_scripts/registry.py b/dev_scripts/registry.py new file mode 100755 index 0000000..9b26420 --- /dev/null +++ b/dev_scripts/registry.py @@ -0,0 +1,240 @@ +#!/usr/bin/python + +import hashlib +import re +import shutil +import subprocess +from tempfile import NamedTemporaryFile + +import click +import requests + +DEFAULT_REPO = "freedomofpress/dangerzone" +SIGSTORE_BUNDLE = "application/vnd.dev.sigstore.bundle.v0.3+json" +DOCKER_MANIFEST_DISTRIBUTION = "application/vnd.docker.distribution.manifest.v2+json" +DOCKER_MANIFEST_INDEX = "application/vnd.oci.image.index.v1+json" +OCI_IMAGE_MANIFEST = "application/vnd.oci.image.manifest.v1+json" + + +class RegistryClient: + def __init__(self, registry, org, image): + self._registry = registry + self._org = org + self._image = image + self._auth_token = None + self._base_url = f"https://{registry}" + self._image_url = f"{self._base_url}/v2/{self._org}/{self._image}" + + @property + def image(self): + return f"{self._registry}/{self._org}/{self._image}" + + def get_auth_token(self): + if not self._auth_token: + auth_url = f"{self._base_url}/token" + response = requests.get( + auth_url, + params={ + "service": f"{self._registry}", + "scope": f"repository:{self._org}/{self._image}:pull", + }, + ) + 
response.raise_for_status() + self._auth_token = response.json()["token"] + return self._auth_token + + def get_auth_header(self): + return {"Authorization": f"Bearer {self.get_auth_token()}"} + + def list_tags(self): + url = f"{self._image_url}/tags/list" + response = requests.get(url, headers=self.get_auth_header()) + response.raise_for_status() + tags = response.json().get("tags", []) + return tags + + def get_manifest(self, tag, extra_headers=None): + """Get manifest information for a specific tag""" + manifest_url = f"{self._image_url}/manifests/{tag}" + headers = { + "Accept": DOCKER_MANIFEST_DISTRIBUTION, + "Authorization": f"Bearer {self.get_auth_token()}", + } + if extra_headers: + headers.update(extra_headers) + + response = requests.get(manifest_url, headers=headers) + response.raise_for_status() + return response + + def list_manifests(self, tag): + return ( + self.get_manifest( + tag, + { + "Accept": DOCKER_MANIFEST_INDEX, + }, + ) + .json() + .get("manifests") + ) + + def get_blob(self, hash): + url = f"{self._image_url}/blobs/{hash}" + response = requests.get( + url, + headers={ + "Authorization": f"Bearer {self.get_auth_token()}", + }, + ) + response.raise_for_status() + return response + + def get_attestation(self, tag): + """ + Retrieve an attestation from a given tag. 
+ + The attestation needs to be attached using the Cosign Bundle + Specification defined at: + + https://github.com/sigstore/cosign/blob/main/specs/BUNDLE_SPEC.md + """ + + def _find_sigstore_bundle_manifest(manifests): + for manifest in manifests: + if manifest["artifactType"] == SIGSTORE_BUNDLE: + return manifest["mediaType"], manifest["digest"] + + def _get_bundle_blob_digest(layers): + for layer in layers: + if layer.get("mediaType") == SIGSTORE_BUNDLE: + return layer["digest"] + + tag_manifest_content = self.get_manifest(tag).content + + # The attestation is available on the same container registry, with a + # specific tag named "sha256-{sha256(manifest)}" + tag_manifest_hash = hashlib.sha256(tag_manifest_content).hexdigest() + + # This will get us a "list" of manifests... + manifests = self.list_manifests(f"sha256-{tag_manifest_hash}") + + # ... from which we want the sigstore bundle + bundle_manifest_mediatype, bundle_manifest_digest = ( + _find_sigstore_bundle_manifest(manifests) + ) + if not bundle_manifest_digest: + raise Error("Not able to find sigstore bundle manifest info") + + bundle_manifest = self.get_manifest( + bundle_manifest_digest, extra_headers={"Accept": bundle_manifest_mediatype} + ).json() + + # From there, we will get the attestation in a blob. + # It will be the first layer listed at this manifest hash location + layers = bundle_manifest.get("layers", []) + + blob_digest = _get_bundle_blob_digest(layers) + bundle = self.get_blob(blob_digest) + return tag_manifest_content, bundle.content + + def verify_attestation(self, image_tag: str, expected_repo: str): + """ + Look up the image attestation to see if the image has been built + on Github runners, and from a given repository. 
+ """ + manifest, bundle = self.get_attestation(image_tag) + + def _write(file, content): + file.write(content) + file.flush() + + # Put the value in files and verify with cosign + with ( + NamedTemporaryFile(mode="wb") as manifest_json, + NamedTemporaryFile(mode="wb") as bundle_json, + ): + _write(manifest_json, manifest) + _write(bundle_json, bundle) + + # Call cosign with the temporary file paths + cmd = [ + "cosign", + "verify-blob-attestation", + "--bundle", + bundle_json.name, + "--new-bundle-format", + "--certificate-oidc-issuer", + "https://token.actions.githubusercontent.com", + "--certificate-identity-regexp", + f"^https://github.com/{expected_repo}/.github/workflows/release-container-image.yml@refs/heads/test/image-publication-cosign", + manifest_json.name, + ] + + result = subprocess.run(cmd, capture_output=True) + if result.returncode != 0: + raise Exception(f"Attestation cannot be verified. {result.stderr}") + return True + + +def parse_image_location(input_string): + """Parses container image location into (registry, namespace, repository, tag)""" + pattern = ( + r"^" + r"(?P[a-zA-Z0-9.-]+)/" + r"(?P[a-zA-Z0-9-]+)/" + r"(?P[^:]+)" + r"(?::(?P[a-zA-Z0-9.-]+))?" 
+ r"$" + ) + match = re.match(pattern, input_string) + if not match: + raise ValueError("Malformed image location") + return match.group("registry", "namespace", "repository", "tag") + + +@click.group() +def main(): + pass + + +@main.command() +@click.argument("image") +def list_tags(image): + registry, org, package, _ = parse_image_location(image) + client = RegistryClient(registry, org, package) + tags = client.list_tags() + click.echo(f"Existing tags for {client.image}") + for tag in tags: + click.echo(tag) + + +@main.command() +@click.argument("image") +@click.option( + "--repo", + default=DEFAULT_REPO, + help="The github repository to check the attestation for", +) +def attest(image: str, repo: str): + """ + Look up the image attestation to see if the image has been built + on Github runners, and from a given repository. + """ + if shutil.which("cosign") is None: + click.echo("The cosign binary is needed but not installed.") + raise click.Abort() + + registry, org, package, tag = parse_image_location(image) + tag = tag or "latest" + + client = RegistryClient(registry, org, package) + verified = client.verify_attestation(tag, repo) + if verified: + click.echo( + f"🎉 The image available at `{client.image}:{tag}` has been built by Github Runners from the `{repo}` repository" + ) + + +if __name__ == "__main__": + main() diff --git a/docs/developer/independent-container-updates.md b/docs/developer/independent-container-updates.md new file mode 100644 index 0000000..25a7d43 --- /dev/null +++ b/docs/developer/independent-container-updates.md @@ -0,0 +1,23 @@ +# Independent Container Updates + +Since version 0.9.0, Dangerzone is able to ship container images independently +from issuing a new release of the software. + +This is useful as images need to be kept updated with the latest security fixes. 
+ +## Nightly images and attestations + +Each night, new images are built and pushed to our container registry, alongside +with a provenance attestation, enabling anybody to ensure that the image has +been originally built by Github CI runners, from a defined source repository (in our case `freedomofpress/dangerzone`). + +To verify the attestations against our expectations, use the following command: +```bash +poetry run ./dev_scripts/registry.py attest ghcr.io/freedomofpress/dangerzone/dangerzone:latest --repo freedomofpress/dangerzone +``` + +In case of sucess, it will report back: + +``` +🎉 The image available at `ghcr.io/freedomofpress/dangerzone/dangerzone:latest` has been built by Github runners from the `freedomofpress/dangerzone` repository. +``` From bcd1ec21730f23870646a4f5534dbf9f2bac3940 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 22 Jan 2025 16:06:06 +0100 Subject: [PATCH 09/18] Add an utility to retrieve manifest info --- dev_scripts/registry.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/dev_scripts/registry.py b/dev_scripts/registry.py index 9b26420..b688056 100755 --- a/dev_scripts/registry.py +++ b/dev_scripts/registry.py @@ -209,6 +209,16 @@ def list_tags(image): click.echo(tag) +@main.command() +@click.argument("image") +@click.argument("tag") +def get_manifest(image, tag): + registry, org, package, _ = parse_image_location(image) + client = RegistryClient(registry, org, package) + resp = client.get_manifest(tag, extra_headers={"Accept": OCI_IMAGE_MANIFEST}) + click.echo(resp.content) + + @main.command() @click.argument("image") @click.option( From 47252cc31d375584c1dc37ddb8532a1a1c5cda09 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Tue, 28 Jan 2025 11:12:21 +0100 Subject: [PATCH 10/18] Automate the verification of image signatures --- dev_scripts/registry.py | 207 +++++++++++++++++++++++++++++++++------- 1 file changed, 171 insertions(+), 36 deletions(-) diff --git 
a/dev_scripts/registry.py b/dev_scripts/registry.py index b688056..d883d81 100755 --- a/dev_scripts/registry.py +++ b/dev_scripts/registry.py @@ -1,9 +1,12 @@ #!/usr/bin/python import hashlib +import json +import platform import re import shutil import subprocess +from base64 import b64decode from tempfile import NamedTemporaryFile import click @@ -90,6 +93,12 @@ class RegistryClient: response.raise_for_status() return response + def get_manifest_hash(self, tag, tag_manifest_content=None): + if not tag_manifest_content: + tag_manifest_content = self.get_manifest(tag).content + + return hashlib.sha256(tag_manifest_content).hexdigest() + def get_attestation(self, tag): """ Retrieve an attestation from a given tag. @@ -114,7 +123,7 @@ class RegistryClient: # The attestation is available on the same container registry, with a # specific tag named "sha256-{sha256(manifest)}" - tag_manifest_hash = hashlib.sha256(tag_manifest_content).hexdigest() + tag_manifest_hash = self.get_manifest_hash(tag, tag_manifest_content) # This will get us a "list" of manifests... manifests = self.list_manifests(f"sha256-{tag_manifest_hash}") @@ -138,43 +147,153 @@ class RegistryClient: bundle = self.get_blob(blob_digest) return tag_manifest_content, bundle.content - def verify_attestation(self, image_tag: str, expected_repo: str): - """ - Look up the image attestation to see if the image has been built - on Github runners, and from a given repository. 
- """ - manifest, bundle = self.get_attestation(image_tag) - def _write(file, content): - file.write(content) - file.flush() +def _write(file, content): + file.write(content) + file.flush() - # Put the value in files and verify with cosign - with ( - NamedTemporaryFile(mode="wb") as manifest_json, - NamedTemporaryFile(mode="wb") as bundle_json, - ): - _write(manifest_json, manifest) - _write(bundle_json, bundle) - # Call cosign with the temporary file paths - cmd = [ - "cosign", - "verify-blob-attestation", - "--bundle", - bundle_json.name, - "--new-bundle-format", - "--certificate-oidc-issuer", - "https://token.actions.githubusercontent.com", - "--certificate-identity-regexp", - f"^https://github.com/{expected_repo}/.github/workflows/release-container-image.yml@refs/heads/test/image-publication-cosign", - manifest_json.name, - ] +def verify_attestation( + registry_client: RegistryClient, image_tag: str, expected_repo: str +): + """ + Look up the image attestation to see if the image has been built + on Github runners, and from a given repository. + """ + manifest, bundle = registry_client.get_attestation(image_tag) - result = subprocess.run(cmd, capture_output=True) - if result.returncode != 0: - raise Exception(f"Attestation cannot be verified. 
{result.stderr}") - return True + # Put the value in files and verify with cosign + with ( + NamedTemporaryFile(mode="wb") as manifest_json, + NamedTemporaryFile(mode="wb") as bundle_json, + ): + _write(manifest_json, manifest) + _write(bundle_json, bundle) + + # Call cosign with the temporary file paths + cmd = [ + "cosign", + "verify-blob-attestation", + "--bundle", + bundle_json.name, + "--new-bundle-format", + "--certificate-oidc-issuer", + "https://token.actions.githubusercontent.com", + "--certificate-identity-regexp", + f"^https://github.com/{expected_repo}/.github/workflows/release-container-image.yml@refs/heads/test/image-publication-cosign", + manifest_json.name, + ] + + result = subprocess.run(cmd, capture_output=True) + if result.returncode != 0: + raise Exception(f"Attestation cannot be verified. {result.stderr}") + return True + + +def new_version_available(): + # XXX - Implement + return True + + +def check_signature(signature_bundle, pub_key): + """Ensure that the signature bundle has been signed by the given public key.""" + + # Put the value in files and verify with cosign + with ( + NamedTemporaryFile(mode="w") as signature_file, + NamedTemporaryFile(mode="bw") as payload_file, + ): + json.dump(signature_bundle, signature_file) + signature_file.flush() + + payload_bytes = b64decode(signature_bundle["Payload"]) + _write(payload_file, payload_bytes) + + cmd = [ + "cosign", + "verify-blob", + "--key", + pub_key, + "--bundle", + signature_file.name, + payload_file.name, + ] + result = subprocess.run(cmd, capture_output=True) + if result.returncode != 0: + return False + return result.stderr == b"Verified OK\n" + + +def get_runtime_name() -> str: + if platform.system() == "Linux": + return "podman" + return "docker" + + +def container_pull(image): + cmd = [get_runtime_name(), "pull", f"{image}"] + process = subprocess.Popen(cmd, stdout=subprocess.PIPE) + process.communicate() + + +def upgrade_container_image(image, tag, pub_key, registry: 
RegistryClient): + if not new_version_available(): + return + + hash = registry.get_manifest_hash(tag) + signatures = get_signatures(image, hash) + if len(signatures) < 1: + raise Exception("Unable to retrieve signatures") + + print(f"Found {len(signatures)} signature(s) for {image}") + for signature in signatures: + signature_is_valid = check_signature(signature, pub_key) + if not signature_is_valid: + raise Exception("Unable to verify signature") + print("✅ Signature is valid") + + # At this point, the signature is verified, let's upgrade + # XXX Use the hash here to avoid race conditions + container_pull(image) + + +def get_signatures(image, hash): + """ + Retrieve the signatures from cosign download signature and convert each one to the "cosign bundle" format. + """ + + def _to_bundle(sig): + # Convert cosign-download signatures to the format expected by cosign bundle. + bundle = sig["Bundle"] + payload = bundle["Payload"] + return { + "base64Signature": sig["Base64Signature"], + "Payload": sig["Payload"], + "cert": sig["Cert"], + "chain": sig["Chain"], + "rekorBundle": { + "SignedEntryTimestamp": bundle["SignedEntryTimestamp"], + "Payload": { + "body": payload["body"], + "integratedTime": payload["integratedTime"], + "logIndex": payload["logIndex"], + "logID": payload["logID"], + }, + }, + "RFC3161Timestamp": sig["RFC3161Timestamp"], + } + + process = subprocess.run( + ["cosign", "download", "signature", f"{image}@sha256:{hash}"], + capture_output=True, + check=True, + ) + + # XXX: Check the output first. 
+ signatures_raw = process.stdout.decode("utf-8").strip().split("\n") + + # Remove the last return, split on newlines, convert from JSON + return [_to_bundle(json.loads(sig)) for sig in signatures_raw] def parse_image_location(input_string): @@ -190,7 +309,13 @@ def parse_image_location(input_string): match = re.match(pattern, input_string) if not match: raise ValueError("Malformed image location") - return match.group("registry", "namespace", "repository", "tag") + + return ( + match.group("registry"), + match.group("namespace"), + match.group("repository"), + match.group("tag") or "latest", + ) @click.group() @@ -198,6 +323,16 @@ def main(): pass +@main.command() +@click.argument("image") +@click.option("--pubkey", default="pub.key") +def upgrade_image(image, pubkey): + registry, org, package, tag = parse_image_location(image) + registry_client = RegistryClient(registry, org, package) + + upgrade_container_image(image, tag, pubkey, registry_client) + + @main.command() @click.argument("image") def list_tags(image): @@ -239,7 +374,7 @@ def attest(image: str, repo: str): tag = tag or "latest" client = RegistryClient(registry, org, package) - verified = client.verify_attestation(tag, repo) + verified = verify_attestation(client, tag, repo) if verified: click.echo( f"🎉 The image available at `{client.image}:{tag}` has been built by Github Runners from the `{repo}` repository" From cbd4795bf612dbb14f20f435ecf947cc93c3c30e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Tue, 28 Jan 2025 15:51:46 +0100 Subject: [PATCH 11/18] Verify podman/docker images against locally stored signatures --- dev_scripts/registry.py | 238 +++++++++++++++++++++++++++++++--------- 1 file changed, 185 insertions(+), 53 deletions(-) diff --git a/dev_scripts/registry.py b/dev_scripts/registry.py index d883d81..c5e5f4a 100755 --- a/dev_scripts/registry.py +++ b/dev_scripts/registry.py @@ -7,11 +7,24 @@ import re import shutil import subprocess from base64 import b64decode 
+from pathlib import Path from tempfile import NamedTemporaryFile import click import requests +try: + import platformdirs +except ImportError: + import appdirs as platformdirs + + +def get_config_dir() -> str: + return Path(platformdirs.user_config_dir("dangerzone")) + + +SIGNATURES_PATH = get_config_dir() / "signatures" + DEFAULT_REPO = "freedomofpress/dangerzone" SIGSTORE_BUNDLE = "application/vnd.dev.sigstore.bundle.v0.3+json" DOCKER_MANIFEST_DISTRIBUTION = "application/vnd.docker.distribution.manifest.v2+json" @@ -190,13 +203,37 @@ def verify_attestation( return True -def new_version_available(): +def new_image_release(): # XXX - Implement return True -def check_signature(signature_bundle, pub_key): - """Ensure that the signature bundle has been signed by the given public key.""" +def signature_to_bundle(sig): + # Convert cosign-download signatures to the format expected by cosign bundle. + bundle = sig["Bundle"] + payload = bundle["Payload"] + return { + "base64Signature": sig["Base64Signature"], + "Payload": sig["Payload"], + "cert": sig["Cert"], + "chain": sig["Chain"], + "rekorBundle": { + "SignedEntryTimestamp": bundle["SignedEntryTimestamp"], + "Payload": { + "body": payload["body"], + "integratedTime": payload["integratedTime"], + "logIndex": payload["logIndex"], + "logID": payload["logID"], + }, + }, + "RFC3161Timestamp": sig["RFC3161Timestamp"], + } + + +def verify_signature(signature, pubkey): + """Verify a signature against a given public key""" + + signature_bundle = signature_to_bundle(signature) # Put the value in files and verify with cosign with ( @@ -213,13 +250,14 @@ def check_signature(signature_bundle, pub_key): "cosign", "verify-blob", "--key", - pub_key, + pubkey, "--bundle", signature_file.name, payload_file.name, ] result = subprocess.run(cmd, capture_output=True) if result.returncode != 0: + # XXX Raise instead? 
return False return result.stderr == b"Verified OK\n" @@ -236,53 +274,116 @@ def container_pull(image): process.communicate() -def upgrade_container_image(image, tag, pub_key, registry: RegistryClient): - if not new_version_available(): +def upgrade_container_image(image, tag, pubkey, registry: RegistryClient): + if not new_image_release(): return hash = registry.get_manifest_hash(tag) signatures = get_signatures(image, hash) + if len(signatures) < 1: raise Exception("Unable to retrieve signatures") print(f"Found {len(signatures)} signature(s) for {image}") for signature in signatures: - signature_is_valid = check_signature(signature, pub_key) + signature_is_valid = verify_signature(signature, pubkey) if not signature_is_valid: raise Exception("Unable to verify signature") print("✅ Signature is valid") - # At this point, the signature is verified, let's upgrade + # At this point, the signatures are verified + # We store the signatures just now to avoid storing unverified signatures + store_signatures(signatures, hash, pubkey) + + # let's upgrade the image # XXX Use the hash here to avoid race conditions container_pull(image) +def get_file_hash(file): + with open(file, "rb") as f: + content = f.read() + return hashlib.sha256(content).hexdigest() + + +def load_signatures(image_hash, pubkey): + pubkey_signatures = SIGNATURES_PATH / get_file_hash(pubkey) + if not pubkey_signatures.exists(): + msg = ( + f"Cannot find a '{pubkey_signatures}' folder." + "You might need to download the image signatures first." + ) + raise Exception(msg) + + with open(pubkey_signatures / f"{image_hash}.json") as f: + return json.load(f) + + +def store_signatures(signatures, image_hash, pubkey): + """ + Store signatures locally in the SIGNATURE_PATH folder, like this: + + ~/.config/dangerzone/signatures/ + └── + └── .json + └── .json + + The format used in the `.json` file is the one of `cosign download + signature`, which differs from the "bundle" one used afterwards. 
+ + It can be converted to the one expected by cosign verify --bundle with + the `signature_to_bundle()` function. + """ + + def _get_digest(sig): + payload = json.loads(b64decode(sig["Payload"])) + return payload["critical"]["image"]["docker-manifest-digest"] + + # All the signatures should share the same hash. + hashes = list(map(_get_digest, signatures)) + if len(set(hashes)) != 1: + raise Exception("Signatures do not share the same image hash") + + if f"sha256:{image_hash}" != hashes[0]: + raise Exception("Signatures do not match the given image hash") + + pubkey_signatures = SIGNATURES_PATH / get_file_hash(pubkey) + pubkey_signatures.mkdir(exist_ok=True) + + with open(pubkey_signatures / f"{image_hash}.json", "w") as f: + json.dump(signatures, f) + + +def verify_local_image_signature(image, pubkey): + """ + Verifies that a local image has a valid signature + """ + image_hash = get_image_hash(image) + signatures = load_signatures(image_hash, pubkey) + if len(signatures) < 1: + raise Exception("No signatures found") + + for signature in signatures: + if not verify_signature(signature, pubkey): + msg = f"Unable to verify signature for {image} with pubkey {pubkey}" + raise Exception(msg) + return True + + +def get_image_hash(image): + """ + Returns a image hash from a local image name + """ + cmd = [get_runtime_name(), "image", "inspect", image, "-f", "{{.Digest}}"] + result = subprocess.run(cmd, capture_output=True, check=True) + return result.stdout.strip().decode().strip("sha256:") + + def get_signatures(image, hash): """ Retrieve the signatures from cosign download signature and convert each one to the "cosign bundle" format. """ - def _to_bundle(sig): - # Convert cosign-download signatures to the format expected by cosign bundle. 
- bundle = sig["Bundle"] - payload = bundle["Payload"] - return { - "base64Signature": sig["Base64Signature"], - "Payload": sig["Payload"], - "cert": sig["Cert"], - "chain": sig["Chain"], - "rekorBundle": { - "SignedEntryTimestamp": bundle["SignedEntryTimestamp"], - "Payload": { - "body": payload["body"], - "integratedTime": payload["integratedTime"], - "logIndex": payload["logIndex"], - "logID": payload["logID"], - }, - }, - "RFC3161Timestamp": sig["RFC3161Timestamp"], - } - process = subprocess.run( ["cosign", "download", "signature", f"{image}@sha256:{hash}"], capture_output=True, @@ -290,32 +391,54 @@ def get_signatures(image, hash): ) # XXX: Check the output first. - signatures_raw = process.stdout.decode("utf-8").strip().split("\n") - # Remove the last return, split on newlines, convert from JSON - return [_to_bundle(json.loads(sig)) for sig in signatures_raw] + signatures_raw = process.stdout.decode("utf-8").strip().split("\n") + return list(map(json.loads, signatures_raw)) -def parse_image_location(input_string): - """Parses container image location into (registry, namespace, repository, tag)""" - pattern = ( - r"^" - r"(?P[a-zA-Z0-9.-]+)/" - r"(?P[a-zA-Z0-9-]+)/" - r"(?P[^:]+)" - r"(?::(?P[a-zA-Z0-9.-]+))?" 
- r"$" - ) - match = re.match(pattern, input_string) - if not match: - raise ValueError("Malformed image location") +class Image: + def __init__(self, registry, namespace, repository, tag="latest"): + self.registry = registry + self.namespace = namespace + self.repository = repository + self.tag = tag - return ( - match.group("registry"), - match.group("namespace"), - match.group("repository"), - match.group("tag") or "latest", - ) + def properties(self): + return (self.registry, self.namespace, self.repository, self.tag) + + @property + def name_without_tag(self): + return f"{self.registry}/{self.namespace}/{self.repository}" + + @property + def name_with_tag(self): + return f"{self.name_without_tag}:{self.tag}" + + @classmethod + def from_string(cls, input_string): + """Parses container image location into (registry, namespace, repository, tag)""" + pattern = ( + r"^" + r"(?P[a-zA-Z0-9.-]+)/" + r"(?P[a-zA-Z0-9-]+)/" + r"(?P[^:]+)" + r"(?::(?P[a-zA-Z0-9.-]+))?" + r"$" + ) + match = re.match(pattern, input_string) + if not match: + raise ValueError("Malformed image location") + + return cls( + match.group("registry"), + match.group("namespace"), + match.group("repository"), + match.group("tag") or "latest", + ) + + +def parse_image_location(string): + return Image.from_string(string).properties @click.group() @@ -327,12 +450,21 @@ def main(): @click.argument("image") @click.option("--pubkey", default="pub.key") def upgrade_image(image, pubkey): - registry, org, package, tag = parse_image_location(image) - registry_client = RegistryClient(registry, org, package) + registry, namespace, repository, tag = parse_image_location(image) + registry_client = RegistryClient(registry, namespace, repository) upgrade_container_image(image, tag, pubkey, registry_client) +@main.command() +@click.argument("image") +@click.option("--pubkey", default="pub.key") +def verify_local_image(image, pubkey): + # XXX remove a potentiel :tag + if verify_local_image_signature(image, pubkey): + 
click.echo(f"✅ The local image {image} has been signed with {pubkey}") + + @main.command() @click.argument("image") def list_tags(image): From d0ab34b4226ef9448efbab8229e585c3ebe2a7fd Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 29 Jan 2025 15:08:50 +0100 Subject: [PATCH 12/18] Move regsitry and cosign utilities to `dangerzone/updater/*`. Placing these inside the `dangerzone` python package enables an inclusion with the software itself, and also makes it possible for end-users to attest the image. --- dangerzone/updater/__init__.py | 0 dangerzone/updater/attestations.py | 41 +++ dangerzone/updater/cli.py | 83 +++++ dangerzone/updater/registry.py | 193 +++++++++++ dangerzone/updater/signatures.py | 233 +++++++++++++ dangerzone/updater/utils.py | 3 + dev_scripts/registry.py | 517 ----------------------------- pyproject.toml | 1 + 8 files changed, 554 insertions(+), 517 deletions(-) create mode 100644 dangerzone/updater/__init__.py create mode 100644 dangerzone/updater/attestations.py create mode 100644 dangerzone/updater/cli.py create mode 100644 dangerzone/updater/registry.py create mode 100644 dangerzone/updater/signatures.py create mode 100644 dangerzone/updater/utils.py delete mode 100755 dev_scripts/registry.py diff --git a/dangerzone/updater/__init__.py b/dangerzone/updater/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/dangerzone/updater/attestations.py b/dangerzone/updater/attestations.py new file mode 100644 index 0000000..8839e1c --- /dev/null +++ b/dangerzone/updater/attestations.py @@ -0,0 +1,41 @@ +import subprocess +from tempfile import NamedTemporaryFile + +from .utils import write + + +def verify_attestation( + manifest: bytes, attestation_bundle: bytes, image_tag: str, expected_repo: str +): + """ + Look up the image attestation to see if the image has been built + on Github runners, and from a given repository. 
+ """ + + # Put the value in files and verify with cosign + with ( + NamedTemporaryFile(mode="wb") as manifest_json, + NamedTemporaryFile(mode="wb") as attestation_bundle_json, + ): + write(manifest_json, manifest) + write(attestation_bundle_json, attestation_bundle) + + # Call cosign with the temporary file paths + cmd = [ + "cosign", + "verify-blob-attestation", + "--bundle", + attestation_bundle_json.name, + "--new-bundle-format", + "--certificate-oidc-issuer", + "https://token.actions.githubusercontent.com", + "--certificate-identity-regexp", + f"^https://github.com/{expected_repo}/.github/workflows/release-container-image.yml@refs/heads/test/image-publication-cosign", + manifest_json.name, + ] + + result = subprocess.run(cmd, capture_output=True) + if result.returncode != 0: + error = result.stderr.decode() + raise Exception(f"Attestation cannot be verified. {error}") + return True diff --git a/dangerzone/updater/cli.py b/dangerzone/updater/cli.py new file mode 100644 index 0000000..6d3c9fb --- /dev/null +++ b/dangerzone/updater/cli.py @@ -0,0 +1,83 @@ +#!/usr/bin/python + +import click + +from . 
import registry +from .attestations import verify_attestation +from .signatures import upgrade_container_image, verify_offline_image_signature + +DEFAULT_REPO = "freedomofpress/dangerzone" + + +@click.group() +def main(): + pass + + +@main.command() +@click.argument("image") +@click.option("--pubkey", default="pub.key") +# XXX Add options to do airgap upgrade +def upgrade(image, pubkey): + manifest_hash = registry.get_manifest_hash(image) + if upgrade_container_image(image, manifest_hash, pubkey): + click.echo(f"✅ The local image {image} has been upgraded") + + +@main.command() +@click.argument("image") +@click.option("--pubkey", default="pub.key") +def verify_local(image, pubkey): + """ + XXX document + """ + # XXX remove a potentiel :tag + if verify_offline_image_signature(image, pubkey): + click.echo(f"✅ The local image {image} has been signed with {pubkey}") + + +@main.command() +@click.argument("image") +def list_tags(image): + click.echo(f"Existing tags for {client.image}") + for tag in registry.list_tags(image): + click.echo(tag) + + +@main.command() +@click.argument("image") +@click.argument("tag") +def get_manifest(image, tag): + click.echo(registry.get_manifest(image, tag)) + + +@main.command() +@click.argument("image") +@click.option( + "--repo", + default=DEFAULT_REPO, + help="The github repository to check the attestation for", +) +# XXX use a consistent naming for these cli commands +def attest(image: str, repo: str): + """ + Look up the image attestation to see if the image has been built + on Github runners, and from a given repository. + """ + # XXX put this inside a module + # if shutil.which("cosign") is None: + # click.echo("The cosign binary is needed but not installed.") + # raise click.Abort() + # XXX: refactor parse_image_location to return a dict. 
+ _, _, _, image_tag = registry.parse_image_location(image) + manifest, bundle = registry.get_attestation(image) + + verified = verify_attestation(manifest, bundle, image_tag, repo) + if verified: + click.echo( + f"🎉 The image available at `{client.image}:{image_tag}` has been built by Github Runners from the `{repo}` repository" + ) + + +if __name__ == "__main__": + main() diff --git a/dangerzone/updater/registry.py b/dangerzone/updater/registry.py new file mode 100644 index 0000000..381124f --- /dev/null +++ b/dangerzone/updater/registry.py @@ -0,0 +1,193 @@ +import hashlib +import re +from typing import Dict, Optional, Tuple + +import requests + +__all__ = [ + "get_manifest_hash", + "list_tags", +] + +SIGSTORE_BUNDLE = "application/vnd.dev.sigstore.bundle.v0.3+json" +DOCKER_MANIFEST_DISTRIBUTION = "application/vnd.docker.distribution.manifest.v2+json" +DOCKER_MANIFEST_INDEX = "application/vnd.oci.image.index.v1+json" +OCI_IMAGE_MANIFEST = "application/vnd.oci.image.manifest.v1+json" + + +def parse_image_location(input_string: str) -> Tuple[str, str, str, str]: + """Parses container image location into (registry, namespace, repository, tag)""" + pattern = ( + r"^" + r"(?P[a-zA-Z0-9.-]+)/" + r"(?P[a-zA-Z0-9-]+)/" + r"(?P[^:]+)" + r"(?::(?P[a-zA-Z0-9.-]+))?" 
+ r"$" + ) + match = re.match(pattern, input_string) + if not match: + raise ValueError("Malformed image location") + + return ( + match.group("registry"), + match.group("namespace"), + match.group("repository"), + match.group("tag") or "latest", + ) + + +class RegistryClient: + def __init__(self, registry: str, org: str, image: str): + self._registry = registry + self._org = org + self._image = image + self._auth_token = None + self._base_url = f"https://{registry}" + self._image_url = f"{self._base_url}/v2/{self._org}/{self._image}" + + @property + def image(self): + return f"{self._registry}/{self._org}/{self._image}" + + def get_auth_token(self) -> Optional[str]: + if not self._auth_token: + auth_url = f"{self._base_url}/token" + response = requests.get( + auth_url, + params={ + "service": f"{self._registry}", + "scope": f"repository:{self._org}/{self._image}:pull", + }, + ) + response.raise_for_status() + self._auth_token = response.json()["token"] + return self._auth_token + + def get_auth_header(self) -> Dict[str, str]: + return {"Authorization": f"Bearer {self.get_auth_token()}"} + + def list_tags(self) -> list: + url = f"{self._image_url}/tags/list" + response = requests.get(url, headers=self.get_auth_header()) + response.raise_for_status() + tags = response.json().get("tags", []) + return tags + + def get_manifest(self, tag, extra_headers=None) -> requests.Response: + """Get manifest information for a specific tag""" + manifest_url = f"{self._image_url}/manifests/{tag}" + headers = { + "Accept": DOCKER_MANIFEST_DISTRIBUTION, + "Authorization": f"Bearer {self.get_auth_token()}", + } + if extra_headers: + headers.update(extra_headers) + + response = requests.get(manifest_url, headers=headers) + response.raise_for_status() + return response + + def list_manifests(self, tag) -> list: + return ( + self.get_manifest( + tag, + { + "Accept": DOCKER_MANIFEST_INDEX, + }, + ) + .json() + .get("manifests") + ) + + def get_blob(self, hash) -> requests.Response: + url 
= f"{self._image_url}/blobs/{hash}" + response = requests.get( + url, + headers={ + "Authorization": f"Bearer {self.get_auth_token()}", + }, + ) + response.raise_for_status() + return response + + def get_manifest_hash(self, tag, tag_manifest_content=None) -> str: + if not tag_manifest_content: + tag_manifest_content = self.get_manifest(tag).content + + return hashlib.sha256(tag_manifest_content).hexdigest() + + def get_attestation(self, tag) -> Tuple[bytes, bytes]: + """ + Retrieve an attestation from a given tag. + + The attestation needs to be attached using the Cosign Bundle + Specification defined at: + + https://github.com/sigstore/cosign/blob/main/specs/BUNDLE_SPEC.md + + Returns a tuple with the tag manifest content and the bundle content. + """ + + def _find_sigstore_bundle_manifest(manifests): + for manifest in manifests: + if manifest["artifactType"] == SIGSTORE_BUNDLE: + return manifest["mediaType"], manifest["digest"] + + def _get_bundle_blob_digest(layers): + for layer in layers: + if layer.get("mediaType") == SIGSTORE_BUNDLE: + return layer["digest"] + + tag_manifest_content = self.get_manifest(tag).content + + # The attestation is available on the same container registry, with a + # specific tag named "sha256-{sha256(manifest)}" + tag_manifest_hash = self.get_manifest_hash(tag, tag_manifest_content) + + # This will get us a "list" of manifests... + manifests = self.list_manifests(f"sha256-{tag_manifest_hash}") + + # ... from which we want the sigstore bundle + bundle_manifest_mediatype, bundle_manifest_digest = ( + _find_sigstore_bundle_manifest(manifests) + ) + if not bundle_manifest_digest: + raise Exception("Not able to find sigstore bundle manifest info") + + bundle_manifest = self.get_manifest( + bundle_manifest_digest, extra_headers={"Accept": bundle_manifest_mediatype} + ).json() + + # From there, we will get the attestation in a blob. 
+ # It will be the first layer listed at this manifest hash location + layers = bundle_manifest.get("layers", []) + + blob_digest = _get_bundle_blob_digest(layers) + bundle = self.get_blob(blob_digest) + return tag_manifest_content, bundle.content + + +def get_manifest_hash(image: str) -> str: + registry, org, package, tag = parse_image_location(image) + client = RegistryClient(registry, org, package) + return client.get_manifest_hash(tag) + + +def list_tags(image: str) -> list: + registry, org, package, _ = parse_image_location(image) + client = RegistryClient(registry, org, package) + return client.list_tags() + + +def get_manifest(image: str, tag: str) -> bytes: + registry, org, package, _ = parse_image_location(image) + client = RegistryClient(registry, org, package) + resp = client.get_manifest(tag, extra_headers={"Accept": OCI_IMAGE_MANIFEST}) + return resp.content + + +def get_attestation(image: str) -> Tuple[bytes, bytes]: + registry, org, package, tag = parse_image_location(image) + client = RegistryClient(registry, org, package) + return client.get_attestation(tag) diff --git a/dangerzone/updater/signatures.py b/dangerzone/updater/signatures.py new file mode 100644 index 0000000..42154c2 --- /dev/null +++ b/dangerzone/updater/signatures.py @@ -0,0 +1,233 @@ +import json +import platform +import re +import subprocess +from base64 import b64decode +from hashlib import sha256 +from pathlib import Path +from tempfile import NamedTemporaryFile +from typing import Dict, List, Tuple + +from .registry import RegistryClient +from .utils import write + +try: + import platformdirs +except ImportError: + import appdirs as platformdirs + + +def get_config_dir() -> Path: + return Path(platformdirs.user_config_dir("dangerzone")) + + +# XXX Store this somewhere else. 
+SIGNATURES_PATH = get_config_dir() / "signatures" +__all__ = [ + "verify_signature", + "load_signatures", + "store_signatures", + "verify_local_image_signature", +] + + +def signature_to_bundle(sig: Dict) -> Dict: + """Convert a cosign-download signature to the format expected by cosign bundle.""" + bundle = sig["Bundle"] + payload = bundle["Payload"] + return { + "base64Signature": sig["Base64Signature"], + "Payload": sig["Payload"], + "cert": sig["Cert"], + "chain": sig["Chain"], + "rekorBundle": { + "SignedEntryTimestamp": bundle["SignedEntryTimestamp"], + "Payload": { + "body": payload["body"], + "integratedTime": payload["integratedTime"], + "logIndex": payload["logIndex"], + "logID": payload["logID"], + }, + }, + "RFC3161Timestamp": sig["RFC3161Timestamp"], + } + + +def verify_signature(signature: dict, pubkey: str) -> bool: + """Verify a signature against a given public key""" + + signature_bundle = signature_to_bundle(signature) + + # Put the value in files and verify with cosign + with ( + NamedTemporaryFile(mode="w") as signature_file, + NamedTemporaryFile(mode="bw") as payload_file, + ): + json.dump(signature_bundle, signature_file) + signature_file.flush() + + payload_bytes = b64decode(signature_bundle["Payload"]) + write(payload_file, payload_bytes) + + cmd = [ + "cosign", + "verify-blob", + "--key", + pubkey, + "--bundle", + signature_file.name, + payload_file.name, + ] + result = subprocess.run(cmd, capture_output=True) + if result.returncode != 0: + # XXX Raise instead? 
+ return False + return result.stderr == b"Verified OK\n" + + +def get_runtime_name() -> str: + if platform.system() == "Linux": + return "podman" + return "docker" + + +def container_pull(image: str): + # XXX - Move to container_utils.py + cmd = [get_runtime_name(), "pull", f"{image}"] + process = subprocess.Popen(cmd, stdout=subprocess.PIPE) + process.communicate() + return process.returncode == 0 + + +def new_image_release(): + # XXX - Implement + return True + + +def upgrade_container_image( + image: str, + manifest_hash: str, + pubkey: str, +): + if not new_image_release(): + return + + # manifest_hash = registry.get_manifest_hash(tag) + signatures = get_signatures(image, manifest_hash) + + if len(signatures) < 1: + raise Exception("Unable to retrieve signatures") + + for signature in signatures: + signature_is_valid = verify_signature(signature, pubkey) + if not signature_is_valid: + raise Exception("Unable to verify signature") + + # At this point, the signatures are verified + # We store the signatures just now to avoid storing unverified signatures + store_signatures(signatures, manifest_hash, pubkey) + + # let's upgrade the image + # XXX Use the hash here to avoid race conditions + return container_pull(image) + + +def get_file_hash(file: str) -> str: + with open(file, "rb") as f: + content = f.read() + return sha256(content).hexdigest() + + +def load_signatures(image_hash, pubkey): + """ + Load signatures from the local filesystem + + See store_signatures() for the expected format. + """ + pubkey_signatures = SIGNATURES_PATH / get_file_hash(pubkey) + if not pubkey_signatures.exists(): + msg = ( + f"Cannot find a '{pubkey_signatures}' folder." + "You might need to download the image signatures first." 
+ ) + raise Exception(msg) + + with open(pubkey_signatures / f"{image_hash}.json") as f: + return json.load(f) + + +def store_signatures(signatures: list[Dict], image_hash: str, pubkey: str): + """ + Store signatures locally in the SIGNATURE_PATH folder, like this: + + ~/.config/dangerzone/signatures/ + └── + └── .json + └── .json + + The format used in the `.json` file is the one of `cosign download + signature`, which differs from the "bundle" one used afterwards. + + It can be converted to the one expected by cosign verify --bundle with + the `signature_to_bundle()` function. + """ + + def _get_digest(sig): + payload = json.loads(b64decode(sig["Payload"])) + return payload["critical"]["image"]["docker-manifest-digest"] + + # All the signatures should share the same hash. + hashes = list(map(_get_digest, signatures)) + if len(set(hashes)) != 1: + raise Exception("Signatures do not share the same image hash") + + if f"sha256:{image_hash}" != hashes[0]: + raise Exception("Signatures do not match the given image hash") + + pubkey_signatures = SIGNATURES_PATH / get_file_hash(pubkey) + pubkey_signatures.mkdir(exist_ok=True) + + with open(pubkey_signatures / f"{image_hash}.json", "w") as f: + json.dump(signatures, f) + + +def verify_offline_image_signature(image: str, pubkey: str) -> bool: + """ + Verifies that a local image has a valid signature + """ + image_hash = load_image_hash(image) + signatures = load_signatures(image_hash, pubkey) + if len(signatures) < 1: + raise Exception("No signatures found") + + for signature in signatures: + if not verify_signature(signature, pubkey): + msg = f"Unable to verify signature for {image} with pubkey {pubkey}" + raise Exception(msg) + return True + + +def load_image_hash(image: str) -> str: + """ + Returns a image hash from a local image name + """ + cmd = [get_runtime_name(), "image", "inspect", image, "-f", "{{.Digest}}"] + result = subprocess.run(cmd, capture_output=True, check=True) + return 
result.stdout.strip().decode().strip("sha256:") + + +def get_signatures(image, hash) -> List[Dict]: + """ + Retrieve the signatures from cosign download signature and convert each one to the "cosign bundle" format. + """ + + process = subprocess.run( + ["cosign", "download", "signature", f"{image}@sha256:{hash}"], + capture_output=True, + check=True, + ) + + # XXX: Check the output first. + # Remove the last return, split on newlines, convert from JSON + signatures_raw = process.stdout.decode("utf-8").strip().split("\n") + return list(map(json.loads, signatures_raw)) diff --git a/dangerzone/updater/utils.py b/dangerzone/updater/utils.py new file mode 100644 index 0000000..fd7b989 --- /dev/null +++ b/dangerzone/updater/utils.py @@ -0,0 +1,3 @@ +def write(file, content: bytes | str): + file.write(content) + file.flush() diff --git a/dev_scripts/registry.py b/dev_scripts/registry.py deleted file mode 100755 index c5e5f4a..0000000 --- a/dev_scripts/registry.py +++ /dev/null @@ -1,517 +0,0 @@ -#!/usr/bin/python - -import hashlib -import json -import platform -import re -import shutil -import subprocess -from base64 import b64decode -from pathlib import Path -from tempfile import NamedTemporaryFile - -import click -import requests - -try: - import platformdirs -except ImportError: - import appdirs as platformdirs - - -def get_config_dir() -> str: - return Path(platformdirs.user_config_dir("dangerzone")) - - -SIGNATURES_PATH = get_config_dir() / "signatures" - -DEFAULT_REPO = "freedomofpress/dangerzone" -SIGSTORE_BUNDLE = "application/vnd.dev.sigstore.bundle.v0.3+json" -DOCKER_MANIFEST_DISTRIBUTION = "application/vnd.docker.distribution.manifest.v2+json" -DOCKER_MANIFEST_INDEX = "application/vnd.oci.image.index.v1+json" -OCI_IMAGE_MANIFEST = "application/vnd.oci.image.manifest.v1+json" - - -class RegistryClient: - def __init__(self, registry, org, image): - self._registry = registry - self._org = org - self._image = image - self._auth_token = None - self._base_url = 
f"https://{registry}" - self._image_url = f"{self._base_url}/v2/{self._org}/{self._image}" - - @property - def image(self): - return f"{self._registry}/{self._org}/{self._image}" - - def get_auth_token(self): - if not self._auth_token: - auth_url = f"{self._base_url}/token" - response = requests.get( - auth_url, - params={ - "service": f"{self._registry}", - "scope": f"repository:{self._org}/{self._image}:pull", - }, - ) - response.raise_for_status() - self._auth_token = response.json()["token"] - return self._auth_token - - def get_auth_header(self): - return {"Authorization": f"Bearer {self.get_auth_token()}"} - - def list_tags(self): - url = f"{self._image_url}/tags/list" - response = requests.get(url, headers=self.get_auth_header()) - response.raise_for_status() - tags = response.json().get("tags", []) - return tags - - def get_manifest(self, tag, extra_headers=None): - """Get manifest information for a specific tag""" - manifest_url = f"{self._image_url}/manifests/{tag}" - headers = { - "Accept": DOCKER_MANIFEST_DISTRIBUTION, - "Authorization": f"Bearer {self.get_auth_token()}", - } - if extra_headers: - headers.update(extra_headers) - - response = requests.get(manifest_url, headers=headers) - response.raise_for_status() - return response - - def list_manifests(self, tag): - return ( - self.get_manifest( - tag, - { - "Accept": DOCKER_MANIFEST_INDEX, - }, - ) - .json() - .get("manifests") - ) - - def get_blob(self, hash): - url = f"{self._image_url}/blobs/{hash}" - response = requests.get( - url, - headers={ - "Authorization": f"Bearer {self.get_auth_token()}", - }, - ) - response.raise_for_status() - return response - - def get_manifest_hash(self, tag, tag_manifest_content=None): - if not tag_manifest_content: - tag_manifest_content = self.get_manifest(tag).content - - return hashlib.sha256(tag_manifest_content).hexdigest() - - def get_attestation(self, tag): - """ - Retrieve an attestation from a given tag. 
- - The attestation needs to be attached using the Cosign Bundle - Specification defined at: - - https://github.com/sigstore/cosign/blob/main/specs/BUNDLE_SPEC.md - """ - - def _find_sigstore_bundle_manifest(manifests): - for manifest in manifests: - if manifest["artifactType"] == SIGSTORE_BUNDLE: - return manifest["mediaType"], manifest["digest"] - - def _get_bundle_blob_digest(layers): - for layer in layers: - if layer.get("mediaType") == SIGSTORE_BUNDLE: - return layer["digest"] - - tag_manifest_content = self.get_manifest(tag).content - - # The attestation is available on the same container registry, with a - # specific tag named "sha256-{sha256(manifest)}" - tag_manifest_hash = self.get_manifest_hash(tag, tag_manifest_content) - - # This will get us a "list" of manifests... - manifests = self.list_manifests(f"sha256-{tag_manifest_hash}") - - # ... from which we want the sigstore bundle - bundle_manifest_mediatype, bundle_manifest_digest = ( - _find_sigstore_bundle_manifest(manifests) - ) - if not bundle_manifest_digest: - raise Error("Not able to find sigstore bundle manifest info") - - bundle_manifest = self.get_manifest( - bundle_manifest_digest, extra_headers={"Accept": bundle_manifest_mediatype} - ).json() - - # From there, we will get the attestation in a blob. - # It will be the first layer listed at this manifest hash location - layers = bundle_manifest.get("layers", []) - - blob_digest = _get_bundle_blob_digest(layers) - bundle = self.get_blob(blob_digest) - return tag_manifest_content, bundle.content - - -def _write(file, content): - file.write(content) - file.flush() - - -def verify_attestation( - registry_client: RegistryClient, image_tag: str, expected_repo: str -): - """ - Look up the image attestation to see if the image has been built - on Github runners, and from a given repository. 
- """ - manifest, bundle = registry_client.get_attestation(image_tag) - - # Put the value in files and verify with cosign - with ( - NamedTemporaryFile(mode="wb") as manifest_json, - NamedTemporaryFile(mode="wb") as bundle_json, - ): - _write(manifest_json, manifest) - _write(bundle_json, bundle) - - # Call cosign with the temporary file paths - cmd = [ - "cosign", - "verify-blob-attestation", - "--bundle", - bundle_json.name, - "--new-bundle-format", - "--certificate-oidc-issuer", - "https://token.actions.githubusercontent.com", - "--certificate-identity-regexp", - f"^https://github.com/{expected_repo}/.github/workflows/release-container-image.yml@refs/heads/test/image-publication-cosign", - manifest_json.name, - ] - - result = subprocess.run(cmd, capture_output=True) - if result.returncode != 0: - raise Exception(f"Attestation cannot be verified. {result.stderr}") - return True - - -def new_image_release(): - # XXX - Implement - return True - - -def signature_to_bundle(sig): - # Convert cosign-download signatures to the format expected by cosign bundle. 
- bundle = sig["Bundle"] - payload = bundle["Payload"] - return { - "base64Signature": sig["Base64Signature"], - "Payload": sig["Payload"], - "cert": sig["Cert"], - "chain": sig["Chain"], - "rekorBundle": { - "SignedEntryTimestamp": bundle["SignedEntryTimestamp"], - "Payload": { - "body": payload["body"], - "integratedTime": payload["integratedTime"], - "logIndex": payload["logIndex"], - "logID": payload["logID"], - }, - }, - "RFC3161Timestamp": sig["RFC3161Timestamp"], - } - - -def verify_signature(signature, pubkey): - """Verify a signature against a given public key""" - - signature_bundle = signature_to_bundle(signature) - - # Put the value in files and verify with cosign - with ( - NamedTemporaryFile(mode="w") as signature_file, - NamedTemporaryFile(mode="bw") as payload_file, - ): - json.dump(signature_bundle, signature_file) - signature_file.flush() - - payload_bytes = b64decode(signature_bundle["Payload"]) - _write(payload_file, payload_bytes) - - cmd = [ - "cosign", - "verify-blob", - "--key", - pubkey, - "--bundle", - signature_file.name, - payload_file.name, - ] - result = subprocess.run(cmd, capture_output=True) - if result.returncode != 0: - # XXX Raise instead? 
- return False - return result.stderr == b"Verified OK\n" - - -def get_runtime_name() -> str: - if platform.system() == "Linux": - return "podman" - return "docker" - - -def container_pull(image): - cmd = [get_runtime_name(), "pull", f"{image}"] - process = subprocess.Popen(cmd, stdout=subprocess.PIPE) - process.communicate() - - -def upgrade_container_image(image, tag, pubkey, registry: RegistryClient): - if not new_image_release(): - return - - hash = registry.get_manifest_hash(tag) - signatures = get_signatures(image, hash) - - if len(signatures) < 1: - raise Exception("Unable to retrieve signatures") - - print(f"Found {len(signatures)} signature(s) for {image}") - for signature in signatures: - signature_is_valid = verify_signature(signature, pubkey) - if not signature_is_valid: - raise Exception("Unable to verify signature") - print("✅ Signature is valid") - - # At this point, the signatures are verified - # We store the signatures just now to avoid storing unverified signatures - store_signatures(signatures, hash, pubkey) - - # let's upgrade the image - # XXX Use the hash here to avoid race conditions - container_pull(image) - - -def get_file_hash(file): - with open(file, "rb") as f: - content = f.read() - return hashlib.sha256(content).hexdigest() - - -def load_signatures(image_hash, pubkey): - pubkey_signatures = SIGNATURES_PATH / get_file_hash(pubkey) - if not pubkey_signatures.exists(): - msg = ( - f"Cannot find a '{pubkey_signatures}' folder." - "You might need to download the image signatures first." - ) - raise Exception(msg) - - with open(pubkey_signatures / f"{image_hash}.json") as f: - return json.load(f) - - -def store_signatures(signatures, image_hash, pubkey): - """ - Store signatures locally in the SIGNATURE_PATH folder, like this: - - ~/.config/dangerzone/signatures/ - └── - └── .json - └── .json - - The format used in the `.json` file is the one of `cosign download - signature`, which differs from the "bundle" one used afterwards. 
- - It can be converted to the one expected by cosign verify --bundle with - the `signature_to_bundle()` function. - """ - - def _get_digest(sig): - payload = json.loads(b64decode(sig["Payload"])) - return payload["critical"]["image"]["docker-manifest-digest"] - - # All the signatures should share the same hash. - hashes = list(map(_get_digest, signatures)) - if len(set(hashes)) != 1: - raise Exception("Signatures do not share the same image hash") - - if f"sha256:{image_hash}" != hashes[0]: - raise Exception("Signatures do not match the given image hash") - - pubkey_signatures = SIGNATURES_PATH / get_file_hash(pubkey) - pubkey_signatures.mkdir(exist_ok=True) - - with open(pubkey_signatures / f"{image_hash}.json", "w") as f: - json.dump(signatures, f) - - -def verify_local_image_signature(image, pubkey): - """ - Verifies that a local image has a valid signature - """ - image_hash = get_image_hash(image) - signatures = load_signatures(image_hash, pubkey) - if len(signatures) < 1: - raise Exception("No signatures found") - - for signature in signatures: - if not verify_signature(signature, pubkey): - msg = f"Unable to verify signature for {image} with pubkey {pubkey}" - raise Exception(msg) - return True - - -def get_image_hash(image): - """ - Returns a image hash from a local image name - """ - cmd = [get_runtime_name(), "image", "inspect", image, "-f", "{{.Digest}}"] - result = subprocess.run(cmd, capture_output=True, check=True) - return result.stdout.strip().decode().strip("sha256:") - - -def get_signatures(image, hash): - """ - Retrieve the signatures from cosign download signature and convert each one to the "cosign bundle" format. - """ - - process = subprocess.run( - ["cosign", "download", "signature", f"{image}@sha256:{hash}"], - capture_output=True, - check=True, - ) - - # XXX: Check the output first. 
- # Remove the last return, split on newlines, convert from JSON - signatures_raw = process.stdout.decode("utf-8").strip().split("\n") - return list(map(json.loads, signatures_raw)) - - -class Image: - def __init__(self, registry, namespace, repository, tag="latest"): - self.registry = registry - self.namespace = namespace - self.repository = repository - self.tag = tag - - def properties(self): - return (self.registry, self.namespace, self.repository, self.tag) - - @property - def name_without_tag(self): - return f"{self.registry}/{self.namespace}/{self.repository}" - - @property - def name_with_tag(self): - return f"{self.name_without_tag}:{self.tag}" - - @classmethod - def from_string(cls, input_string): - """Parses container image location into (registry, namespace, repository, tag)""" - pattern = ( - r"^" - r"(?P[a-zA-Z0-9.-]+)/" - r"(?P[a-zA-Z0-9-]+)/" - r"(?P[^:]+)" - r"(?::(?P[a-zA-Z0-9.-]+))?" - r"$" - ) - match = re.match(pattern, input_string) - if not match: - raise ValueError("Malformed image location") - - return cls( - match.group("registry"), - match.group("namespace"), - match.group("repository"), - match.group("tag") or "latest", - ) - - -def parse_image_location(string): - return Image.from_string(string).properties - - -@click.group() -def main(): - pass - - -@main.command() -@click.argument("image") -@click.option("--pubkey", default="pub.key") -def upgrade_image(image, pubkey): - registry, namespace, repository, tag = parse_image_location(image) - registry_client = RegistryClient(registry, namespace, repository) - - upgrade_container_image(image, tag, pubkey, registry_client) - - -@main.command() -@click.argument("image") -@click.option("--pubkey", default="pub.key") -def verify_local_image(image, pubkey): - # XXX remove a potentiel :tag - if verify_local_image_signature(image, pubkey): - click.echo(f"✅ The local image {image} has been signed with {pubkey}") - - -@main.command() -@click.argument("image") -def list_tags(image): - registry, org, 
package, _ = parse_image_location(image) - client = RegistryClient(registry, org, package) - tags = client.list_tags() - click.echo(f"Existing tags for {client.image}") - for tag in tags: - click.echo(tag) - - -@main.command() -@click.argument("image") -@click.argument("tag") -def get_manifest(image, tag): - registry, org, package, _ = parse_image_location(image) - client = RegistryClient(registry, org, package) - resp = client.get_manifest(tag, extra_headers={"Accept": OCI_IMAGE_MANIFEST}) - click.echo(resp.content) - - -@main.command() -@click.argument("image") -@click.option( - "--repo", - default=DEFAULT_REPO, - help="The github repository to check the attestation for", -) -def attest(image: str, repo: str): - """ - Look up the image attestation to see if the image has been built - on Github runners, and from a given repository. - """ - if shutil.which("cosign") is None: - click.echo("The cosign binary is needed but not installed.") - raise click.Abort() - - registry, org, package, tag = parse_image_location(image) - tag = tag or "latest" - - client = RegistryClient(registry, org, package) - verified = verify_attestation(client, tag, repo) - if verified: - click.echo( - f"🎉 The image available at `{client.image}:{tag}` has been built by Github Runners from the `{repo}` repository" - ) - - -if __name__ == "__main__": - main() diff --git a/pyproject.toml b/pyproject.toml index 4bb4bb4..58093a4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,6 +34,7 @@ shiboken6 = [ [tool.poetry.scripts] dangerzone = 'dangerzone:main' dangerzone-cli = 'dangerzone:main' +dangerzone-image = "dangerzone.updater.cli:main" # Dependencies required for packaging the code on various platforms. 
[tool.poetry.group.package.dependencies] From fd1db717b7e3f5a734284e8792913cecc8ea8719 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 29 Jan 2025 17:01:48 +0100 Subject: [PATCH 13/18] Refactoring of dangerzone/updater/* --- dangerzone/updater/__init__.py | 4 ++ dangerzone/updater/attestations.py | 10 +-- dangerzone/updater/cli.py | 36 +++++----- dangerzone/updater/registry.py | 110 ++++++++++++++++++----------- dangerzone/updater/signatures.py | 24 +++---- dangerzone/updater/utils.py | 3 - 6 files changed, 104 insertions(+), 83 deletions(-) delete mode 100644 dangerzone/updater/utils.py diff --git a/dangerzone/updater/__init__.py b/dangerzone/updater/__init__.py index e69de29..9ae9065 100644 --- a/dangerzone/updater/__init__.py +++ b/dangerzone/updater/__init__.py @@ -0,0 +1,4 @@ +import logging + +log = logging.getLogger(__name__) +log.setLevel(logging.INFO) diff --git a/dangerzone/updater/attestations.py b/dangerzone/updater/attestations.py index 8839e1c..c7d0a87 100644 --- a/dangerzone/updater/attestations.py +++ b/dangerzone/updater/attestations.py @@ -1,12 +1,10 @@ import subprocess from tempfile import NamedTemporaryFile -from .utils import write - def verify_attestation( manifest: bytes, attestation_bundle: bytes, image_tag: str, expected_repo: str -): +) -> bool: """ Look up the image attestation to see if the image has been built on Github runners, and from a given repository. 
@@ -17,8 +15,10 @@ def verify_attestation( NamedTemporaryFile(mode="wb") as manifest_json, NamedTemporaryFile(mode="wb") as attestation_bundle_json, ): - write(manifest_json, manifest) - write(attestation_bundle_json, attestation_bundle) + manifest_json.write(manifest) + manifest_json.flush() + attestation_bundle_json.write(attestation_bundle) + attestation_bundle_json.flush() # Call cosign with the temporary file paths cmd = [ diff --git a/dangerzone/updater/cli.py b/dangerzone/updater/cli.py index 6d3c9fb..e88817a 100644 --- a/dangerzone/updater/cli.py +++ b/dangerzone/updater/cli.py @@ -6,19 +6,20 @@ from . import registry from .attestations import verify_attestation from .signatures import upgrade_container_image, verify_offline_image_signature -DEFAULT_REPO = "freedomofpress/dangerzone" +DEFAULT_REPOSITORY = "freedomofpress/dangerzone" @click.group() -def main(): +def main() -> None: pass @main.command() -@click.argument("image") +@click.option("--image") @click.option("--pubkey", default="pub.key") +@click.option("--airgap", is_flag=True) # XXX Add options to do airgap upgrade -def upgrade(image, pubkey): +def upgrade(image: str, pubkey: str) -> None: manifest_hash = registry.get_manifest_hash(image) if upgrade_container_image(image, manifest_hash, pubkey): click.echo(f"✅ The local image {image} has been upgraded") @@ -27,9 +28,9 @@ def upgrade(image, pubkey): @main.command() @click.argument("image") @click.option("--pubkey", default="pub.key") -def verify_local(image, pubkey): +def verify_local(image: str, pubkey: str) -> None: """ - XXX document + Verify the local image signature against a public key and the stored signatures. 
""" # XXX remove a potentiel :tag if verify_offline_image_signature(image, pubkey): @@ -38,28 +39,26 @@ def verify_local(image, pubkey): @main.command() @click.argument("image") -def list_tags(image): - click.echo(f"Existing tags for {client.image}") +def list_remote_tags(image: str) -> None: + click.echo(f"Existing tags for {image}") for tag in registry.list_tags(image): click.echo(tag) @main.command() @click.argument("image") -@click.argument("tag") -def get_manifest(image, tag): - click.echo(registry.get_manifest(image, tag)) +def get_manifest(image: str) -> None: + click.echo(registry.get_manifest(image)) @main.command() @click.argument("image") @click.option( - "--repo", - default=DEFAULT_REPO, + "--repository", + default=DEFAULT_REPOSITORY, help="The github repository to check the attestation for", ) -# XXX use a consistent naming for these cli commands -def attest(image: str, repo: str): +def attest_provenance(image: str, repository: str) -> None: """ Look up the image attestation to see if the image has been built on Github runners, and from a given repository. @@ -68,14 +67,13 @@ def attest(image: str, repo: str): # if shutil.which("cosign") is None: # click.echo("The cosign binary is needed but not installed.") # raise click.Abort() - # XXX: refactor parse_image_location to return a dict. 
- _, _, _, image_tag = registry.parse_image_location(image) + parsed = registry.parse_image_location(image) manifest, bundle = registry.get_attestation(image) - verified = verify_attestation(manifest, bundle, image_tag, repo) + verified = verify_attestation(manifest, bundle, parsed.tag, repository) if verified: click.echo( - f"🎉 The image available at `{client.image}:{image_tag}` has been built by Github Runners from the `{repo}` repository" + f"🎉 The image available at `{parsed.full_name}` has been built by Github Runners from the `{repository}` repository" ) diff --git a/dangerzone/updater/registry.py b/dangerzone/updater/registry.py index 381124f..de919ea 100644 --- a/dangerzone/updater/registry.py +++ b/dangerzone/updater/registry.py @@ -1,12 +1,17 @@ import hashlib import re +from collections import namedtuple from typing import Dict, Optional, Tuple import requests +from . import log + __all__ = [ "get_manifest_hash", "list_tags", + "get_manifest", + "get_attestation", ] SIGSTORE_BUNDLE = "application/vnd.dev.sigstore.bundle.v0.3+json" @@ -15,40 +20,51 @@ DOCKER_MANIFEST_INDEX = "application/vnd.oci.image.index.v1+json" OCI_IMAGE_MANIFEST = "application/vnd.oci.image.manifest.v1+json" -def parse_image_location(input_string: str) -> Tuple[str, str, str, str]: - """Parses container image location into (registry, namespace, repository, tag)""" +class Image(namedtuple("Image", ["registry", "namespace", "image_name", "tag"])): + __slots__ = () + + @property + def full_name(self) -> str: + tag = f":{self.tag}" if self.tag else "" + return f"{self.registry}/{self.namespace}/{self.image_name}{tag}" + + +def parse_image_location(input_string: str) -> Image: + """Parses container image location into an Image namedtuple""" pattern = ( r"^" r"(?P[a-zA-Z0-9.-]+)/" r"(?P[a-zA-Z0-9-]+)/" - r"(?P[^:]+)" + r"(?P[^:]+)" r"(?::(?P[a-zA-Z0-9.-]+))?" 
r"$" ) match = re.match(pattern, input_string) if not match: raise ValueError("Malformed image location") - - return ( - match.group("registry"), - match.group("namespace"), - match.group("repository"), - match.group("tag") or "latest", + return Image( + registry=match.group("registry"), + namespace=match.group("namespace"), + image_name=match.group("image_name"), + tag=match.group("tag") or "latest", ) class RegistryClient: - def __init__(self, registry: str, org: str, image: str): - self._registry = registry - self._org = org - self._image = image - self._auth_token = None - self._base_url = f"https://{registry}" - self._image_url = f"{self._base_url}/v2/{self._org}/{self._image}" + def __init__( + self, + image: Image | str, + ): + if isinstance(image, str): + image = parse_image_location(image) - @property - def image(self): - return f"{self._registry}/{self._org}/{self._image}" + self._image = image + self._registry = image.registry + self._namespace = image.namespace + self._image_name = image.image_name + self._auth_token = None + self._base_url = f"https://{self._registry}" + self._image_url = f"{self._base_url}/v2/{self._namespace}/{self._image_name}" def get_auth_token(self) -> Optional[str]: if not self._auth_token: @@ -57,7 +73,7 @@ class RegistryClient: auth_url, params={ "service": f"{self._registry}", - "scope": f"repository:{self._org}/{self._image}:pull", + "scope": f"repository:{self._namespace}/{self._image_name}:pull", }, ) response.raise_for_status() @@ -74,7 +90,9 @@ class RegistryClient: tags = response.json().get("tags", []) return tags - def get_manifest(self, tag, extra_headers=None) -> requests.Response: + def get_manifest( + self, tag: str, extra_headers: Optional[dict] = None + ) -> requests.Response: """Get manifest information for a specific tag""" manifest_url = f"{self._image_url}/manifests/{tag}" headers = { @@ -88,7 +106,7 @@ class RegistryClient: response.raise_for_status() return response - def list_manifests(self, tag) -> list: 
+ def list_manifests(self, tag: str) -> list: return ( self.get_manifest( tag, @@ -100,7 +118,7 @@ class RegistryClient: .get("manifests") ) - def get_blob(self, hash) -> requests.Response: + def get_blob(self, hash: str) -> requests.Response: url = f"{self._image_url}/blobs/{hash}" response = requests.get( url, @@ -111,13 +129,15 @@ class RegistryClient: response.raise_for_status() return response - def get_manifest_hash(self, tag, tag_manifest_content=None) -> str: + def get_manifest_hash( + self, tag: str, tag_manifest_content: Optional[bytes] = None + ) -> str: if not tag_manifest_content: tag_manifest_content = self.get_manifest(tag).content return hashlib.sha256(tag_manifest_content).hexdigest() - def get_attestation(self, tag) -> Tuple[bytes, bytes]: + def get_attestation(self, tag: str) -> Tuple[bytes, bytes]: """ Retrieve an attestation from a given tag. @@ -129,15 +149,20 @@ class RegistryClient: Returns a tuple with the tag manifest content and the bundle content. """ - def _find_sigstore_bundle_manifest(manifests): + # FIXME: do not only rely on the first layer + def _find_sigstore_bundle_manifest( + manifests: list, + ) -> Tuple[Optional[str], Optional[str]]: for manifest in manifests: if manifest["artifactType"] == SIGSTORE_BUNDLE: return manifest["mediaType"], manifest["digest"] + return None, None - def _get_bundle_blob_digest(layers): + def _get_bundle_blob_digest(layers: list) -> Optional[str]: for layer in layers: if layer.get("mediaType") == SIGSTORE_BUNDLE: return layer["digest"] + return None tag_manifest_content = self.get_manifest(tag).content @@ -164,30 +189,29 @@ class RegistryClient: layers = bundle_manifest.get("layers", []) blob_digest = _get_bundle_blob_digest(layers) + log.info(f"Found sigstore bundle blob digest: {blob_digest}") + if not blob_digest: + raise Exception("Not able to find sigstore bundle blob info") bundle = self.get_blob(blob_digest) return tag_manifest_content, bundle.content -def get_manifest_hash(image: str) -> str: 
- registry, org, package, tag = parse_image_location(image) - client = RegistryClient(registry, org, package) - return client.get_manifest_hash(tag) +def get_manifest_hash(image_str: str) -> str: + image = parse_image_location(image_str) + return RegistryClient(image).get_manifest_hash(image.tag) -def list_tags(image: str) -> list: - registry, org, package, _ = parse_image_location(image) - client = RegistryClient(registry, org, package) - return client.list_tags() +def list_tags(image_str: str) -> list: + return RegistryClient(image_str).list_tags() -def get_manifest(image: str, tag: str) -> bytes: - registry, org, package, _ = parse_image_location(image) - client = RegistryClient(registry, org, package) - resp = client.get_manifest(tag, extra_headers={"Accept": OCI_IMAGE_MANIFEST}) +def get_manifest(image_str: str) -> bytes: + image = parse_image_location(image_str) + client = RegistryClient(image) + resp = client.get_manifest(image.tag, extra_headers={"Accept": OCI_IMAGE_MANIFEST}) return resp.content -def get_attestation(image: str) -> Tuple[bytes, bytes]: - registry, org, package, tag = parse_image_location(image) - client = RegistryClient(registry, org, package) - return client.get_attestation(tag) +def get_attestation(image_str: str) -> Tuple[bytes, bytes]: + image = parse_image_location(image_str) + return RegistryClient(image).get_attestation(image.tag) diff --git a/dangerzone/updater/signatures.py b/dangerzone/updater/signatures.py index 42154c2..c8e61ee 100644 --- a/dangerzone/updater/signatures.py +++ b/dangerzone/updater/signatures.py @@ -8,13 +8,10 @@ from pathlib import Path from tempfile import NamedTemporaryFile from typing import Dict, List, Tuple -from .registry import RegistryClient -from .utils import write - try: import platformdirs except ImportError: - import appdirs as platformdirs + import appdirs as platformdirs # type: ignore[no-redef] def get_config_dir() -> Path: @@ -67,7 +64,8 @@ def verify_signature(signature: dict, pubkey: str) -> 
bool: signature_file.flush() payload_bytes = b64decode(signature_bundle["Payload"]) - write(payload_file, payload_bytes) + payload_file.write(payload_bytes) + payload_file.flush() cmd = [ "cosign", @@ -91,7 +89,7 @@ def get_runtime_name() -> str: return "docker" -def container_pull(image: str): +def container_pull(image: str) -> bool: # XXX - Move to container_utils.py cmd = [get_runtime_name(), "pull", f"{image}"] process = subprocess.Popen(cmd, stdout=subprocess.PIPE) @@ -99,7 +97,7 @@ def container_pull(image: str): return process.returncode == 0 -def new_image_release(): +def new_image_release() -> bool: # XXX - Implement return True @@ -108,9 +106,9 @@ def upgrade_container_image( image: str, manifest_hash: str, pubkey: str, -): +) -> bool: if not new_image_release(): - return + return False # manifest_hash = registry.get_manifest_hash(tag) signatures = get_signatures(image, manifest_hash) @@ -138,7 +136,7 @@ def get_file_hash(file: str) -> str: return sha256(content).hexdigest() -def load_signatures(image_hash, pubkey): +def load_signatures(image_hash: str, pubkey: str) -> List[Dict]: """ Load signatures from the local filesystem @@ -156,7 +154,7 @@ def load_signatures(image_hash, pubkey): return json.load(f) -def store_signatures(signatures: list[Dict], image_hash: str, pubkey: str): +def store_signatures(signatures: list[Dict], image_hash: str, pubkey: str) -> None: """ Store signatures locally in the SIGNATURE_PATH folder, like this: @@ -172,7 +170,7 @@ def store_signatures(signatures: list[Dict], image_hash: str, pubkey: str): the `signature_to_bundle()` function. 
""" - def _get_digest(sig): + def _get_digest(sig: Dict) -> str: payload = json.loads(b64decode(sig["Payload"])) return payload["critical"]["image"]["docker-manifest-digest"] @@ -216,7 +214,7 @@ def load_image_hash(image: str) -> str: return result.stdout.strip().decode().strip("sha256:") -def get_signatures(image, hash) -> List[Dict]: +def get_signatures(image: str, hash: str) -> List[Dict]: """ Retrieve the signatures from cosign download signature and convert each one to the "cosign bundle" format. """ diff --git a/dangerzone/updater/utils.py b/dangerzone/updater/utils.py deleted file mode 100644 index fd7b989..0000000 --- a/dangerzone/updater/utils.py +++ /dev/null @@ -1,3 +0,0 @@ -def write(file, content: bytes | str): - file.write(content) - file.flush() From 7991a5cb9cc8960ffcead71a06dcb5de1e8f78dc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 29 Jan 2025 19:14:40 +0100 Subject: [PATCH 14/18] Some more refactoring --- dangerzone/container_utils.py | 23 ++++++++-- dangerzone/updater/__init__.py | 1 - dangerzone/updater/cli.py | 41 +++++++++++++---- dangerzone/updater/errors.py | 38 +++++++++++++++ dangerzone/updater/registry.py | 6 +-- dangerzone/updater/signatures.py | 79 +++++++++++++++++--------------- dangerzone/util.py | 2 +- dev_scripts/dangerzone-image | 1 + 8 files changed, 134 insertions(+), 57 deletions(-) create mode 100644 dangerzone/updater/errors.py create mode 120000 dev_scripts/dangerzone-image diff --git a/dangerzone/container_utils.py b/dangerzone/container_utils.py index 99c9a08..493dace 100644 --- a/dangerzone/container_utils.py +++ b/dangerzone/container_utils.py @@ -15,11 +15,9 @@ log = logging.getLogger(__name__) def get_runtime_name() -> str: if platform.system() == "Linux": - runtime_name = "podman" - else: - # Windows, Darwin, and unknown use docker for now, dangerzone-vm eventually - runtime_name = "docker" - return runtime_name + return "podman" + # Windows, Darwin, and unknown use docker for now, 
dangerzone-vm eventually + return "docker" def get_runtime_version() -> Tuple[int, int]: @@ -147,3 +145,18 @@ def load_image_tarball() -> None: ) log.info("Successfully installed container image from") + + +def container_pull(image: str) -> bool: + """Pull a container image from a registry.""" + cmd = [get_runtime_name(), "pull", f"{image}"] + process = subprocess.Popen(cmd, stdout=subprocess.PIPE) + process.communicate() + return process.returncode == 0 + + +def load_image_hash(image: str) -> str: + """Returns a image hash from a local image name""" + cmd = [get_runtime_name(), "image", "inspect", image, "-f", "{{.Digest}}"] + result = subprocess.run(cmd, capture_output=True, check=True) + return result.stdout.strip().decode().strip("sha256:") diff --git a/dangerzone/updater/__init__.py b/dangerzone/updater/__init__.py index 9ae9065..3988bf1 100644 --- a/dangerzone/updater/__init__.py +++ b/dangerzone/updater/__init__.py @@ -1,4 +1,3 @@ import logging log = logging.getLogger(__name__) -log.setLevel(logging.INFO) diff --git a/dangerzone/updater/cli.py b/dangerzone/updater/cli.py index e88817a..39fc16b 100644 --- a/dangerzone/updater/cli.py +++ b/dangerzone/updater/cli.py @@ -1,40 +1,63 @@ #!/usr/bin/python +import logging + import click -from . import registry +from ..util import get_resource_path +from . 
import errors, log, registry from .attestations import verify_attestation from .signatures import upgrade_container_image, verify_offline_image_signature DEFAULT_REPOSITORY = "freedomofpress/dangerzone" +PUBKEY_DEFAULT_LOCATION = get_resource_path("freedomofpress-dangerzone-pub.key") + @click.group() -def main() -> None: - pass +@click.option("--debug", is_flag=True) +def main(debug=False) -> None: + if debug: + click.echo("Debug mode enabled") + level = logging.DEBUG + else: + level = logging.INFO + logging.basicConfig(level=level) @main.command() @click.option("--image") -@click.option("--pubkey", default="pub.key") +@click.option("--pubkey", default=PUBKEY_DEFAULT_LOCATION) @click.option("--airgap", is_flag=True) # XXX Add options to do airgap upgrade -def upgrade(image: str, pubkey: str) -> None: +def upgrade(image: str, pubkey: str, airgap: bool) -> None: + """Upgrade the image to the latest signed version.""" manifest_hash = registry.get_manifest_hash(image) - if upgrade_container_image(image, manifest_hash, pubkey): + try: + is_upgraded = upgrade_container_image(image, manifest_hash, pubkey) click.echo(f"✅ The local image {image} has been upgraded") + except errors.ImageAlreadyUpToDate as e: + click.echo(f"✅ {e}") + raise click.Abort() @main.command() @click.argument("image") -@click.option("--pubkey", default="pub.key") -def verify_local(image: str, pubkey: str) -> None: +@click.option("--pubkey", default=PUBKEY_DEFAULT_LOCATION) +def verify_offline(image: str, pubkey: str) -> None: """ Verify the local image signature against a public key and the stored signatures. 
""" # XXX remove a potentiel :tag if verify_offline_image_signature(image, pubkey): - click.echo(f"✅ The local image {image} has been signed with {pubkey}") + click.echo( + ( + f"Verifying the local image:\n\n" + f"pubkey: {pubkey}\n" + f"image: {image}\n\n" + f"✅ The local image {image} has been signed with {pubkey}" + ) + ) @main.command() diff --git a/dangerzone/updater/errors.py b/dangerzone/updater/errors.py new file mode 100644 index 0000000..40dd1dc --- /dev/null +++ b/dangerzone/updater/errors.py @@ -0,0 +1,38 @@ +class UpdaterError(Exception): + pass + + +class ImageAlreadyUpToDate(UpdaterError): + pass + + +class SignatureError(UpdaterError): + pass + + +class RegistryError(UpdaterError): + pass + + +class NoRemoteSignatures(SignatureError): + pass + + +class SignatureVerificationError(SignatureError): + pass + + +class SignaturesFolderDoesNotExist(SignatureError): + pass + + +class InvalidSignatures(SignatureError): + pass + + +class SignatureMismatch(SignatureError): + pass + + +class LocalSignatureNotFound(SignatureError): + pass diff --git a/dangerzone/updater/registry.py b/dangerzone/updater/registry.py index de919ea..306616e 100644 --- a/dangerzone/updater/registry.py +++ b/dangerzone/updater/registry.py @@ -5,7 +5,7 @@ from typing import Dict, Optional, Tuple import requests -from . import log +from . 
import errors, log __all__ = [ "get_manifest_hash", @@ -178,7 +178,7 @@ class RegistryClient: _find_sigstore_bundle_manifest(manifests) ) if not bundle_manifest_digest: - raise Exception("Not able to find sigstore bundle manifest info") + raise errors.RegistryError("Not able to find sigstore bundle manifest info") bundle_manifest = self.get_manifest( bundle_manifest_digest, extra_headers={"Accept": bundle_manifest_mediatype} @@ -191,7 +191,7 @@ class RegistryClient: blob_digest = _get_bundle_blob_digest(layers) log.info(f"Found sigstore bundle blob digest: {blob_digest}") if not blob_digest: - raise Exception("Not able to find sigstore bundle blob info") + raise errors.RegistryError("Not able to find sigstore bundle blob info") bundle = self.get_blob(blob_digest) return tag_manifest_content, bundle.content diff --git a/dangerzone/updater/signatures.py b/dangerzone/updater/signatures.py index c8e61ee..655c2a3 100644 --- a/dangerzone/updater/signatures.py +++ b/dangerzone/updater/signatures.py @@ -8,6 +8,10 @@ from pathlib import Path from tempfile import NamedTemporaryFile from typing import Dict, List, Tuple +from ..container_utils import container_pull, load_image_hash +from . 
import errors, log +from .registry import get_manifest_hash + try: import platformdirs except ImportError: @@ -24,10 +28,18 @@ __all__ = [ "verify_signature", "load_signatures", "store_signatures", - "verify_local_image_signature", + "verify_offline_image_signature", ] +def is_cosign_installed() -> bool: + try: + subprocess.run(["cosign", "version"], capture_output=True, check=True) + return True + except subprocess.CalledProcessError: + return False + + def signature_to_bundle(sig: Dict) -> Dict: """Convert a cosign-download signature to the format expected by cosign bundle.""" bundle = sig["Bundle"] @@ -55,7 +67,6 @@ def verify_signature(signature: dict, pubkey: str) -> bool: signature_bundle = signature_to_bundle(signature) - # Put the value in files and verify with cosign with ( NamedTemporaryFile(mode="w") as signature_file, NamedTemporaryFile(mode="bw") as payload_file, @@ -76,30 +87,24 @@ def verify_signature(signature: dict, pubkey: str) -> bool: signature_file.name, payload_file.name, ] + log.debug(" ".join(cmd)) result = subprocess.run(cmd, capture_output=True) if result.returncode != 0: # XXX Raise instead? 
+ log.debug("Failed to verify signature", result.stderr) return False - return result.stderr == b"Verified OK\n" + if result.stderr == b"Verified OK\n": + log.debug("Signature verified") + return True + return False -def get_runtime_name() -> str: - if platform.system() == "Linux": - return "podman" - return "docker" - - -def container_pull(image: str) -> bool: - # XXX - Move to container_utils.py - cmd = [get_runtime_name(), "pull", f"{image}"] - process = subprocess.Popen(cmd, stdout=subprocess.PIPE) - process.communicate() - return process.returncode == 0 - - -def new_image_release() -> bool: - # XXX - Implement - return True +def new_image_release(image) -> bool: + remote_hash = get_manifest_hash(image) + local_hash = load_image_hash(image) + log.debug("Remote hash: %s", remote_hash) + log.debug("Local hash: %s", local_hash) + return remote_hash != local_hash def upgrade_container_image( @@ -107,19 +112,20 @@ def upgrade_container_image( manifest_hash: str, pubkey: str, ) -> bool: - if not new_image_release(): + if not new_image_release(image): + raise errors.ImageAlreadyUpToDate("The image is already up to date") return False - # manifest_hash = registry.get_manifest_hash(tag) signatures = get_signatures(image, manifest_hash) + log.debug("Signatures: %s", signatures) if len(signatures) < 1: - raise Exception("Unable to retrieve signatures") + raise errors.NoRemoteSignatures("No remote signatures found") for signature in signatures: signature_is_valid = verify_signature(signature, pubkey) if not signature_is_valid: - raise Exception("Unable to verify signature") + raise errors.SignatureVerificationError() # At this point, the signatures are verified # We store the signatures just now to avoid storing unverified signatures @@ -148,9 +154,10 @@ def load_signatures(image_hash: str, pubkey: str) -> List[Dict]: f"Cannot find a '{pubkey_signatures}' folder." "You might need to download the image signatures first." 
) - raise Exception(msg) + raise errors.SignaturesFolderDoesNotExist(msg) with open(pubkey_signatures / f"{image_hash}.json") as f: + log.debug("Loading signatures from %s", f.name) return json.load(f) @@ -177,15 +184,18 @@ def store_signatures(signatures: list[Dict], image_hash: str, pubkey: str) -> No # All the signatures should share the same hash. hashes = list(map(_get_digest, signatures)) if len(set(hashes)) != 1: - raise Exception("Signatures do not share the same image hash") + raise errors.InvalidSignatures("Signatures do not share the same image hash") if f"sha256:{image_hash}" != hashes[0]: - raise Exception("Signatures do not match the given image hash") + raise errors.SignatureMismatch("Signatures do not match the given image hash") pubkey_signatures = SIGNATURES_PATH / get_file_hash(pubkey) pubkey_signatures.mkdir(exist_ok=True) with open(pubkey_signatures / f"{image_hash}.json", "w") as f: + log.debug( + f"Storing signatures for {image_hash} in {pubkey_signatures}/{image_hash}.json" + ) json.dump(signatures, f) @@ -193,27 +203,20 @@ def verify_offline_image_signature(image: str, pubkey: str) -> bool: """ Verifies that a local image has a valid signature """ + log.info(f"Verifying local image {image} against pubkey {pubkey}") image_hash = load_image_hash(image) + log.debug(f"Image hash: {image_hash}") signatures = load_signatures(image_hash, pubkey) if len(signatures) < 1: - raise Exception("No signatures found") + raise errors.LocalSignatureNotFound("No signatures found") for signature in signatures: if not verify_signature(signature, pubkey): msg = f"Unable to verify signature for {image} with pubkey {pubkey}" - raise Exception(msg) + raise errors.SignatureVerificationError(msg) return True -def load_image_hash(image: str) -> str: - """ - Returns a image hash from a local image name - """ - cmd = [get_runtime_name(), "image", "inspect", image, "-f", "{{.Digest}}"] - result = subprocess.run(cmd, capture_output=True, check=True) - return 
result.stdout.strip().decode().strip("sha256:") - - def get_signatures(image: str, hash: str) -> List[Dict]: """ Retrieve the signatures from cosign download signature and convert each one to the "cosign bundle" format. diff --git a/dangerzone/util.py b/dangerzone/util.py index bcad701..90f77cc 100644 --- a/dangerzone/util.py +++ b/dangerzone/util.py @@ -8,7 +8,7 @@ import unicodedata try: import platformdirs except ImportError: - import appdirs as platformdirs + import appdirs as platformdirs # type: ignore[no-redef] def get_config_dir() -> str: diff --git a/dev_scripts/dangerzone-image b/dev_scripts/dangerzone-image new file mode 120000 index 0000000..2fe47df --- /dev/null +++ b/dev_scripts/dangerzone-image @@ -0,0 +1 @@ +dangerzone \ No newline at end of file From 7bbd260c72049eddfb61776f9b41efa92dd4050e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 29 Jan 2025 19:31:30 +0100 Subject: [PATCH 15/18] Add a dev_scripts/dangerzone-image --- dev_scripts/dangerzone-image | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) mode change 120000 => 100755 dev_scripts/dangerzone-image diff --git a/dev_scripts/dangerzone-image b/dev_scripts/dangerzone-image deleted file mode 120000 index 2fe47df..0000000 --- a/dev_scripts/dangerzone-image +++ /dev/null @@ -1 +0,0 @@ -dangerzone \ No newline at end of file diff --git a/dev_scripts/dangerzone-image b/dev_scripts/dangerzone-image new file mode 100755 index 0000000..5467207 --- /dev/null +++ b/dev_scripts/dangerzone-image @@ -0,0 +1,13 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- + +import os +import sys + +# Load dangerzone module and resources from the source code tree +sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) +sys.dangerzone_dev = True + +from dangerzone.updater import cli + +cli.main() From f7069a9c163464513c81835f526026890d5b8c1b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Wed, 29 Jan 2025 19:31:54 
+0100 Subject: [PATCH 16/18] Ensure cosign is installed before trying to use it --- dangerzone/updater/attestations.py | 3 +++ dangerzone/updater/errors.py | 4 ++++ dangerzone/updater/registry.py | 2 ++ dangerzone/updater/signatures.py | 12 +++--------- dangerzone/updater/utils.py | 10 ++++++++++ 5 files changed, 22 insertions(+), 9 deletions(-) create mode 100644 dangerzone/updater/utils.py diff --git a/dangerzone/updater/attestations.py b/dangerzone/updater/attestations.py index c7d0a87..3028ec3 100644 --- a/dangerzone/updater/attestations.py +++ b/dangerzone/updater/attestations.py @@ -1,6 +1,8 @@ import subprocess from tempfile import NamedTemporaryFile +from . import utils + def verify_attestation( manifest: bytes, attestation_bundle: bytes, image_tag: str, expected_repo: str @@ -9,6 +11,7 @@ def verify_attestation( Look up the image attestation to see if the image has been built on Github runners, and from a given repository. """ + utils.ensure_cosign() # Put the value in files and verify with cosign with ( diff --git a/dangerzone/updater/errors.py b/dangerzone/updater/errors.py index 40dd1dc..7297991 100644 --- a/dangerzone/updater/errors.py +++ b/dangerzone/updater/errors.py @@ -36,3 +36,7 @@ class SignatureMismatch(SignatureError): class LocalSignatureNotFound(SignatureError): pass + + +class CosignNotInstalledError(SignatureError): + pass diff --git a/dangerzone/updater/registry.py b/dangerzone/updater/registry.py index 306616e..2fd4eca 100644 --- a/dangerzone/updater/registry.py +++ b/dangerzone/updater/registry.py @@ -12,6 +12,8 @@ __all__ = [ "list_tags", "get_manifest", "get_attestation", + "Image", + "parse_image_location", ] SIGSTORE_BUNDLE = "application/vnd.dev.sigstore.bundle.v0.3+json" diff --git a/dangerzone/updater/signatures.py b/dangerzone/updater/signatures.py index 655c2a3..898d95b 100644 --- a/dangerzone/updater/signatures.py +++ b/dangerzone/updater/signatures.py @@ -9,7 +9,7 @@ from tempfile import NamedTemporaryFile from typing import 
Dict, List, Tuple from ..container_utils import container_pull, load_image_hash -from . import errors, log +from . import errors, log, utils from .registry import get_manifest_hash try: @@ -32,14 +32,6 @@ __all__ = [ ] -def is_cosign_installed() -> bool: - try: - subprocess.run(["cosign", "version"], capture_output=True, check=True) - return True - except subprocess.CalledProcessError: - return False - - def signature_to_bundle(sig: Dict) -> Dict: """Convert a cosign-download signature to the format expected by cosign bundle.""" bundle = sig["Bundle"] @@ -65,6 +57,7 @@ def signature_to_bundle(sig: Dict) -> Dict: def verify_signature(signature: dict, pubkey: str) -> bool: """Verify a signature against a given public key""" + utils.ensure_cosign() signature_bundle = signature_to_bundle(signature) with ( @@ -221,6 +214,7 @@ def get_signatures(image: str, hash: str) -> List[Dict]: """ Retrieve the signatures from cosign download signature and convert each one to the "cosign bundle" format. """ + utils.ensure_cosign() process = subprocess.run( ["cosign", "download", "signature", f"{image}@sha256:{hash}"], diff --git a/dangerzone/updater/utils.py b/dangerzone/updater/utils.py new file mode 100644 index 0000000..a97a49e --- /dev/null +++ b/dangerzone/updater/utils.py @@ -0,0 +1,10 @@ +import subprocess + +from . 
import errors + + +def ensure_cosign() -> None: + try: + subprocess.run(["cosign", "version"], capture_output=True, check=True) + except subprocess.CalledProcessError: + raise errors.CosignNotInstalledError() From 087e5bd1ad72a2cfb29f0e6280a119e8ade0e0a4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Mon, 3 Feb 2025 17:39:03 +0100 Subject: [PATCH 17/18] Allow installation on air-gapped systems - Verify the archive against the known public signature - Prepare a new archive format (with signature removed) - Load the new image and retag it with the expected tag During this process, the signatures are lost and should instead be converted to a known format. Additionally, the name of the repository should ideally come from the signatures rather than from the command line. --- dangerzone/container_utils.py | 38 ++++- dangerzone/isolation_provider/container.py | 2 +- dangerzone/rntime.py | 189 +++++++++++++++++++++ dangerzone/updater/cli.py | 30 +++- dangerzone/updater/registry.py | 1 - dangerzone/updater/signatures.py | 161 ++++++++++++++---- 6 files changed, 373 insertions(+), 48 deletions(-) create mode 100644 dangerzone/rntime.py diff --git a/dangerzone/container_utils.py b/dangerzone/container_utils.py index 493dace..cc0683d 100644 --- a/dangerzone/container_utils.py +++ b/dangerzone/container_utils.py @@ -116,7 +116,7 @@ def get_expected_tag() -> str: return f.read().strip() -def load_image_tarball() -> None: +def load_image_tarball_in_memory() -> None: log.info("Installing Dangerzone container image...") p = subprocess.Popen( [get_runtime(), "load"], @@ -147,6 +147,36 @@ def load_image_tarball() -> None: log.info("Successfully installed container image from") +def load_image_tarball_file(tarball_path: str) -> None: + cmd = [get_runtime(), "load", "-i", tarball_path] + subprocess.run(cmd, startupinfo=get_subprocess_startupinfo(), check=True) + + log.info("Successfully installed container image from %s", tarball_path) + + +def
tag_image_by_digest(digest: str, tag: str) -> None: + image_id = get_image_id_by_digest(digest) + cmd = [get_runtime(), "tag", image_id, tag] + log.debug(" ".join(cmd)) + subprocess.run(cmd, startupinfo=get_subprocess_startupinfo(), check=True) + + +def get_image_id_by_digest(digest: str) -> str: + cmd = [ + get_runtime(), + "images", + "-f", + f"digest={digest}", + "--format", + "{{.Id}}", + ] + log.debug(" ".join(cmd)) + process = subprocess.run( + cmd, startupinfo=get_subprocess_startupinfo(), check=True, capture_output=True + ) + return process.stdout.decode().strip() + + def container_pull(image: str) -> bool: """Pull a container image from a registry.""" cmd = [get_runtime_name(), "pull", f"{image}"] @@ -155,8 +185,10 @@ def container_pull(image: str) -> bool: return process.returncode == 0 -def load_image_hash(image: str) -> str: - """Returns a image hash from a local image name""" +def get_local_image_hash(image: str) -> str: + """ + Returns a image hash from a local image name + """ cmd = [get_runtime_name(), "image", "inspect", image, "-f", "{{.Digest}}"] result = subprocess.run(cmd, capture_output=True, check=True) return result.stdout.strip().decode().strip("sha256:") diff --git a/dangerzone/isolation_provider/container.py b/dangerzone/isolation_provider/container.py index 0213cde..adc5c49 100644 --- a/dangerzone/isolation_provider/container.py +++ b/dangerzone/isolation_provider/container.py @@ -102,7 +102,7 @@ class Container(IsolationProvider): return True # Load the image tarball into the container runtime. - container_utils.load_image_tarball() + container_utils.load_image_tarball_in_memory() # Check that the container image has the expected image tag. 
# See https://github.com/freedomofpress/dangerzone/issues/988 for an example diff --git a/dangerzone/rntime.py b/dangerzone/rntime.py new file mode 100644 index 0000000..7b84a26 --- /dev/null +++ b/dangerzone/rntime.py @@ -0,0 +1,189 @@ +import gzip +import logging +import platform +import shutil +import subprocess +from typing import List, Optional, Tuple + +from . import errors +from .util import get_resource_path, get_subprocess_startupinfo + +CONTAINER_NAME = "dangerzone.rocks/dangerzone" + +log = logging.getLogger(__name__) + + +def get_runtime_name() -> str: + if platform.system() == "Linux": + return "podman" + # Windows, Darwin, and unknown use docker for now, dangerzone-vm eventually + return "docker" + + +def get_runtime_version() -> Tuple[int, int]: + """Get the major/minor parts of the Docker/Podman version. + + Some of the operations we perform in this module rely on some Podman features + that are not available across all of our platforms. In order to have a proper + fallback, we need to know the Podman version. More specifically, we're fine with + just knowing the major and minor version, since writing/installing a full-blown + semver parser is an overkill. + """ + # Get the Docker/Podman version, using a Go template. + runtime = get_runtime_name() + if runtime == "podman": + query = "{{.Client.Version}}" + else: + query = "{{.Server.Version}}" + + cmd = [runtime, "version", "-f", query] + try: + version = subprocess.run( + cmd, + startupinfo=get_subprocess_startupinfo(), + capture_output=True, + check=True, + ).stdout.decode() + except Exception as e: + msg = f"Could not get the version of the {runtime.capitalize()} tool: {e}" + raise RuntimeError(msg) from e + + # Parse this version and return the major/minor parts, since we don't need the + # rest. 
+ try: + major, minor, _ = version.split(".", 3) + return (int(major), int(minor)) + except Exception as e: + msg = ( + f"Could not parse the version of the {runtime.capitalize()} tool" + f" (found: '{version}') due to the following error: {e}" + ) + raise RuntimeError(msg) + + +def get_runtime() -> str: + container_tech = get_runtime_name() + runtime = shutil.which(container_tech) + if runtime is None: + raise errors.NoContainerTechException(container_tech) + return runtime + + +def list_image_tags() -> List[str]: + """Get the tags of all loaded Dangerzone images. + + This method returns a mapping of image tags to image IDs, for all Dangerzone + images. This can be useful when we want to find which are the local image tags, + and which image ID does the "latest" tag point to. + """ + return ( + subprocess.check_output( + [ + get_runtime(), + "image", + "list", + "--format", + "{{ .Tag }}", + CONTAINER_NAME, + ], + text=True, + startupinfo=get_subprocess_startupinfo(), + ) + .strip() + .split() + ) + + +def delete_image_tag(tag: str) -> None: + """Delete a Dangerzone image tag.""" + name = CONTAINER_NAME + ":" + tag + log.warning(f"Deleting old container image: {name}") + try: + subprocess.check_output( + [get_runtime(), "rmi", "--force", name], + startupinfo=get_subprocess_startupinfo(), + ) + except Exception as e: + log.warning( + f"Couldn't delete old container image '{name}', so leaving it there." 
+ f" Original error: {e}" + ) + + +def get_expected_tag() -> str: + """Get the tag of the Dangerzone image tarball from the image-id.txt file.""" + with open(get_resource_path("image-id.txt")) as f: + return f.read().strip() + + +def tag_image_by_digest(digest: str, tag: str) -> None: + image_id = get_image_id_by_digest(digest) + cmd = [get_runtime(), "tag", image_id, tag] + subprocess.run(cmd, startupinfo=get_subprocess_startupinfo(), check=True) + + +def get_image_id_by_digest(digest: str) -> str: + cmd = [ + get_runtime(), + "image", + "tag", + "-f", + f'digest="{digest}"', + "--format ", + "{{.Id}}", + ] + process = subprocess.run( + cmd, startupinfo=get_subprocess_startupinfo(), check=True, capture_output=True + ) + return process.stdout.decode().strip() + + +def load_image_tarball_in_memory( + compressed_container_path: Optional[str] = None, +) -> None: + if compressed_container_path is None: + compressed_container_path = get_resource_path("container.tar.gz") + + log.info("Installing Dangerzone container image...") + p = subprocess.Popen( + [get_runtime(), "load"], + stdin=subprocess.PIPE, + startupinfo=get_subprocess_startupinfo(), + ) + + chunk_size = 4 << 20 + + with gzip.open(compressed_container_path) as f: + while True: + chunk = f.read(chunk_size) + if len(chunk) > 0: + if p.stdin: + p.stdin.write(chunk) + else: + break + _, err = p.communicate() + if p.returncode < 0: + if err: + error = err.decode() + else: + error = "No output" + raise errors.ImageInstallationException( + f"Could not install container image: {error}" + ) + + log.info("Successfully installed container image from") + + +def load_image_tarball_file(container_path: str) -> None: + cmd = [get_runtime(), "load", "-i", container_path] + subprocess.run(cmd, startupinfo=get_subprocess_startupinfo(), check=True) + + log.info("Successfully installed container image from %s", container_path) + + +def container_pull(image: str) -> bool: + # XXX - Move to container_utils.py + cmd = 
[get_runtime_name(), "pull", f"{image}"] + process = subprocess.Popen(cmd, stdout=subprocess.PIPE) + process.communicate() + return process.returncode == 0 diff --git a/dangerzone/updater/cli.py b/dangerzone/updater/cli.py index 39fc16b..7dc9c35 100644 --- a/dangerzone/updater/cli.py +++ b/dangerzone/updater/cli.py @@ -7,16 +7,20 @@ import click from ..util import get_resource_path from . import errors, log, registry from .attestations import verify_attestation -from .signatures import upgrade_container_image, verify_offline_image_signature +from .signatures import ( + upgrade_container_image, + upgrade_container_image_airgapped, + verify_offline_image_signature, +) DEFAULT_REPOSITORY = "freedomofpress/dangerzone" - +DEFAULT_IMAGE_NAME = "ghcr.io/freedomofpress/dangerzone" PUBKEY_DEFAULT_LOCATION = get_resource_path("freedomofpress-dangerzone-pub.key") @click.group() @click.option("--debug", is_flag=True) -def main(debug=False) -> None: +def main(debug: bool) -> None: if debug: click.echo("Debug mode enabled") level = logging.DEBUG @@ -26,11 +30,9 @@ def main(debug=False) -> None: @main.command() -@click.option("--image") +@click.argument("image") @click.option("--pubkey", default=PUBKEY_DEFAULT_LOCATION) -@click.option("--airgap", is_flag=True) -# XXX Add options to do airgap upgrade -def upgrade(image: str, pubkey: str, airgap: bool) -> None: +def upgrade(image: str, pubkey: str) -> None: """Upgrade the image to the latest signed version.""" manifest_hash = registry.get_manifest_hash(image) try: @@ -41,6 +43,20 @@ def upgrade(image: str, pubkey: str, airgap: bool) -> None: raise click.Abort() +@main.command() +@click.argument("image_filename") +@click.option("--pubkey", default=PUBKEY_DEFAULT_LOCATION) +@click.option("--image-name", default=DEFAULT_IMAGE_NAME) +def upgrade_airgapped(image_filename: str, pubkey: str, image_name: str) -> None: + """Upgrade the image to the latest signed version.""" + try: + upgrade_container_image_airgapped(image_filename, pubkey, 
image_name) + click.echo(f"✅ Installed image {image_filename} on the system") + except errors.ImageAlreadyUpToDate as e: + click.echo(f"✅ {e}") + raise click.Abort() + + @main.command() @click.argument("image") @click.option("--pubkey", default=PUBKEY_DEFAULT_LOCATION) diff --git a/dangerzone/updater/registry.py b/dangerzone/updater/registry.py index 2fd4eca..a0285c1 100644 --- a/dangerzone/updater/registry.py +++ b/dangerzone/updater/registry.py @@ -12,7 +12,6 @@ __all__ = [ "list_tags", "get_manifest", "get_attestation", - "Image", "parse_image_location", ] diff --git a/dangerzone/updater/signatures.py b/dangerzone/updater/signatures.py index 898d95b..3680c49 100644 --- a/dangerzone/updater/signatures.py +++ b/dangerzone/updater/signatures.py @@ -2,15 +2,16 @@ import json import platform import re import subprocess +import tarfile from base64 import b64decode from hashlib import sha256 +from io import BytesIO from pathlib import Path -from tempfile import NamedTemporaryFile -from typing import Dict, List, Tuple +from tempfile import NamedTemporaryFile, TemporaryDirectory +from typing import Dict, List, Optional, Tuple -from ..container_utils import container_pull, load_image_hash -from . import errors, log, utils -from .registry import get_manifest_hash +from .. import container_utils as runtime +from . 
import errors, log, registry, utils try: import platformdirs @@ -54,12 +55,40 @@ def signature_to_bundle(sig: Dict) -> Dict: } -def verify_signature(signature: dict, pubkey: str) -> bool: +def cosign_verify_local_image(oci_image_folder: str, pubkey: str) -> bool: + """Verify the given path against the given public key""" + + utils.ensure_cosign() + cmd = [ + "cosign", + "verify", + "--key", + pubkey, + "--offline", + "--local-image", + oci_image_folder, + ] + log.debug(" ".join(cmd)) + result = subprocess.run(cmd, capture_output=True) + if result.returncode == 0: + log.debug("Signature verified") + return True + log.debug("Failed to verify signature", result.stderr) + return False + + +def verify_signature(signature: dict, image_hash: str, pubkey: str) -> bool: """Verify a signature against a given public key""" + # XXX - Also verfy the identity/docker-reference field against the expected value + # e.g. ghcr.io/freedomofpress/dangerzone/dangerzone utils.ensure_cosign() signature_bundle = signature_to_bundle(signature) + payload_bytes = b64decode(signature_bundle["Payload"]) + if json.loads(payload_bytes)["critical"]["type"] != f"sha256:{image_hash}": + raise errors.SignatureMismatch("The signature does not match the image hash") + with ( NamedTemporaryFile(mode="w") as signature_file, NamedTemporaryFile(mode="bw") as payload_file, @@ -67,7 +96,6 @@ def verify_signature(signature: dict, pubkey: str) -> bool: json.dump(signature_bundle, signature_file) signature_file.flush() - payload_bytes = b64decode(signature_bundle["Payload"]) payload_file.write(payload_bytes) payload_file.flush() @@ -92,47 +120,107 @@ def verify_signature(signature: dict, pubkey: str) -> bool: return False -def new_image_release(image) -> bool: - remote_hash = get_manifest_hash(image) - local_hash = load_image_hash(image) +def new_image_release(image: str) -> bool: + remote_hash = registry.get_manifest_hash(image) + local_hash = runtime.get_local_image_hash(image) log.debug("Remote hash: %s", 
remote_hash) log.debug("Local hash: %s", local_hash) return remote_hash != local_hash -def upgrade_container_image( - image: str, - manifest_hash: str, +def verify_signatures( + signatures: List[Dict], + image_hash: str, pubkey: str, ) -> bool: + for signature in signatures: + if not verify_signature(signature, image_hash, pubkey): + raise errors.SignatureVerificationError() + return True + + +def upgrade_container_image(image: str, manifest_hash: str, pubkey: str) -> bool: + """Verify and upgrade the image to the latest, if signed.""" if not new_image_release(image): raise errors.ImageAlreadyUpToDate("The image is already up to date") - return False - signatures = get_signatures(image, manifest_hash) - log.debug("Signatures: %s", signatures) - - if len(signatures) < 1: - raise errors.NoRemoteSignatures("No remote signatures found") - - for signature in signatures: - signature_is_valid = verify_signature(signature, pubkey) - if not signature_is_valid: - raise errors.SignatureVerificationError() + signatures = get_remote_signatures(image, manifest_hash) + verify_signatures(signatures, manifest_hash, pubkey) # At this point, the signatures are verified # We store the signatures just now to avoid storing unverified signatures store_signatures(signatures, manifest_hash, pubkey) # let's upgrade the image - # XXX Use the hash here to avoid race conditions - return container_pull(image) + # XXX Use the image digest here to avoid race conditions + return runtime.container_pull(image) -def get_file_hash(file: str) -> str: - with open(file, "rb") as f: - content = f.read() +def upgrade_container_image_airgapped( + container_tar: str, pubkey: str, image_name: str +) -> bool: + """ + Verify the given archive against its self-contained signatures, then + upgrade the image and retag it to the expected tag. + + Right now, the archive is extracted and reconstructed, requiring some space + on the filesystem. 
+ """ + # XXX Use a memory buffer instead of the filesystem + with TemporaryDirectory() as tmpdir: + with tarfile.open(container_tar, "r") as archive: + archive.extractall(tmpdir) + + # XXX Check if the contained signatures match the given ones? + # Or maybe store both signatures? + if not cosign_verify_local_image(tmpdir, pubkey): + raise errors.SignatureVerificationError() + + # Remove the signatures from the archive. + with open(Path(tmpdir) / "index.json") as f: + index_json = json.load(f) + index_json["manifests"] = [ + manifest + for manifest in index_json["manifests"] + if manifest["annotations"].get("kind") + != "dev.cosignproject.cosign/sigs" + ] + + image_digest = index_json["manifests"][0].get("digest") + + # Write the new index.json to the temp folder + with open(Path(tmpdir) / "index.json", "w") as f: + json.dump(index_json, f) + + with NamedTemporaryFile(suffix=".tar") as temporary_tar: + with tarfile.open(temporary_tar.name, "w") as archive: + # The root is the tmpdir + archive.add(Path(tmpdir) / "index.json", arcname="index.json") + archive.add(Path(tmpdir) / "oci-layout", arcname="oci-layout") + archive.add(Path(tmpdir) / "blobs", arcname="blobs") + + runtime.load_image_tarball_file(temporary_tar.name) + runtime.tag_image_by_digest(image_digest, image_name) + + # XXX Convert the signatures to the expected format + + # At this point, the signatures are verified + # We store the signatures just now to avoid storing unverified signatures + # store_signatures(signatures, image_hash, pubkey) + + return True + + +def get_file_hash(file: Optional[str] = None, content: Optional[bytes] = None) -> str: + """Get the sha256 hash of a file or content""" + if not file and not content: + raise errors.UpdaterError("No file or content provided") + if file: + with open(file, "rb") as f: + content = f.read() + if content: return sha256(content).hexdigest() + return "" def load_signatures(image_hash: str, pubkey: str) -> List[Dict]: @@ -197,23 +285,21 @@ def 
verify_offline_image_signature(image: str, pubkey: str) -> bool: Verifies that a local image has a valid signature """ log.info(f"Verifying local image {image} against pubkey {pubkey}") - image_hash = load_image_hash(image) + image_hash = runtime.get_local_image_hash(image) log.debug(f"Image hash: {image_hash}") signatures = load_signatures(image_hash, pubkey) if len(signatures) < 1: raise errors.LocalSignatureNotFound("No signatures found") for signature in signatures: - if not verify_signature(signature, pubkey): + if not verify_signature(signature, image_hash, pubkey): msg = f"Unable to verify signature for {image} with pubkey {pubkey}" raise errors.SignatureVerificationError(msg) return True -def get_signatures(image: str, hash: str) -> List[Dict]: - """ - Retrieve the signatures from cosign download signature and convert each one to the "cosign bundle" format. - """ +def get_remote_signatures(image: str, hash: str) -> List[Dict]: + """Retrieve the signatures from the registry, via `cosign download`.""" utils.ensure_cosign() process = subprocess.run( @@ -225,4 +311,7 @@ def get_signatures(image: str, hash: str) -> List[Dict]: # XXX: Check the output first. # Remove the last return, split on newlines, convert from JSON signatures_raw = process.stdout.decode("utf-8").strip().split("\n") - return list(map(json.loads, signatures_raw)) + signatures = list(map(json.loads, signatures_raw)) + if len(signatures) < 1: + raise errors.NoRemoteSignatures("No signatures found for the image") + return signatures From 12aafa2606a11a71a28b0c32ef302b2ebbcd1045 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Alexis=20M=C3=A9taireau?= Date: Mon, 3 Feb 2025 20:18:10 +0100 Subject: [PATCH 18/18] Locally store the signatures for oci-images archives On air-gapped environments, it's now possible to load signatures generated by `cosign save` commands. The signatures embedded in this format will be converted to the one used by `cosign download signature`.
--- dangerzone/container_utils.py | 12 +++++- dangerzone/updater/errors.py | 4 ++ dangerzone/updater/signatures.py | 71 +++++++++++++++++++++++++------- 3 files changed, 70 insertions(+), 17 deletions(-) diff --git a/dangerzone/container_utils.py b/dangerzone/container_utils.py index cc0683d..e9205e2 100644 --- a/dangerzone/container_utils.py +++ b/dangerzone/container_utils.py @@ -155,6 +155,9 @@ def load_image_tarball_file(tarball_path: str) -> None: def tag_image_by_digest(digest: str, tag: str) -> None: + """Tag a container image by digest. + The sha256: prefix should be omitted from the digest. + """ image_id = get_image_id_by_digest(digest) cmd = [get_runtime(), "tag", image_id, tag] log.debug(" ".join(cmd)) @@ -162,11 +165,14 @@ def tag_image_by_digest(digest: str, tag: str) -> None: def get_image_id_by_digest(digest: str) -> str: + """Get an image ID from a digest. + The sha256: prefix should be omitted from the digest. + """ cmd = [ get_runtime(), "images", "-f", - f"digest={digest}", + f"digest=sha256:{digest}", "--format", "{{.Id}}", ] @@ -174,7 +180,8 @@ process = subprocess.run( cmd, startupinfo=get_subprocess_startupinfo(), check=True, capture_output=True ) - return process.stdout.decode().strip() + # In case we have multiple lines, we only want the first one.
+ return process.stdout.decode().strip().split("\n")[0] def container_pull(image: str) -> bool: diff --git a/dangerzone/updater/errors.py b/dangerzone/updater/errors.py index 7297991..cd9c2b8 100644 --- a/dangerzone/updater/errors.py +++ b/dangerzone/updater/errors.py @@ -22,6 +22,10 @@ class SignatureVerificationError(SignatureError): pass +class SignatureExtractionError(SignatureError): + pass + + class SignaturesFolderDoesNotExist(SignatureError): pass diff --git a/dangerzone/updater/signatures.py b/dangerzone/updater/signatures.py index 3680c49..f38ea14 100644 --- a/dangerzone/updater/signatures.py +++ b/dangerzone/updater/signatures.py @@ -3,7 +3,7 @@ import platform import re import subprocess import tarfile -from base64 import b64decode +from base64 import b64decode, b64encode from hashlib import sha256 from io import BytesIO from pathlib import Path @@ -156,6 +156,10 @@ def upgrade_container_image(image: str, manifest_hash: str, pubkey: str) -> bool return runtime.container_pull(image) +def _get_blob(tmpdir: str, hash: str) -> Path: + return Path(tmpdir) / "blobs" / "sha256" / hash.replace("sha256:", "") + + def upgrade_container_image_airgapped( container_tar: str, pubkey: str, image_name: str ) -> bool: @@ -166,8 +170,19 @@ def upgrade_container_image_airgapped( Right now, the archive is extracted and reconstructed, requiring some space on the filesystem. """ + # XXX Use a memory buffer instead of the filesystem with TemporaryDirectory() as tmpdir: + + def _get_signature_filename(manifests: List[Dict]) -> Path: + for manifest in manifests: + if ( + manifest["annotations"].get("kind") + == "dev.cosignproject.cosign/sigs" + ): + return _get_blob(tmpdir, manifest["digest"]) + raise errors.SignatureExtractionError() + with tarfile.open(container_tar, "r") as archive: archive.extractall(tmpdir) @@ -179,14 +194,19 @@ def upgrade_container_image_airgapped( # Remove the signatures from the archive. 
with open(Path(tmpdir) / "index.json") as f: index_json = json.load(f) - index_json["manifests"] = [ - manifest - for manifest in index_json["manifests"] - if manifest["annotations"].get("kind") - != "dev.cosignproject.cosign/sigs" - ] - image_digest = index_json["manifests"][0].get("digest") + signature_filename = _get_signature_filename(index_json["manifests"]) + + index_json["manifests"] = [ + manifest + for manifest in index_json["manifests"] + if manifest["annotations"].get("kind") != "dev.cosignproject.cosign/sigs" + ] + + with open(signature_filename, "rb") as f: + signatures = convert_oci_images_signatures(json.load(f), tmpdir) + + image_digest = index_json["manifests"][0].get("digest").replace("sha256:", "") # Write the new index.json to the temp folder with open(Path(tmpdir) / "index.json", "w") as f: @@ -202,15 +222,34 @@ def upgrade_container_image_airgapped( runtime.load_image_tarball_file(temporary_tar.name) runtime.tag_image_by_digest(image_digest, image_name) - # XXX Convert the signatures to the expected format - - # At this point, the signatures are verified - # We store the signatures just now to avoid storing unverified signatures - # store_signatures(signatures, image_hash, pubkey) - + store_signatures(signatures, image_digest, pubkey) return True +def convert_oci_images_signatures( + signatures_manifest: List[Dict], tmpdir: str +) -> List[Dict]: + def _to_cosign_signature(layer: Dict) -> Dict: + signature = layer["annotations"]["dev.cosignproject.cosign/signature"] + bundle = json.loads(layer["annotations"]["dev.sigstore.cosign/bundle"]) + payload_body = json.loads(b64decode(bundle["Payload"]["body"])) + + payload_location = _get_blob(tmpdir, layer["digest"]) + with open(payload_location, "rb") as f: + payload_b64 = b64encode(f.read()).decode() + + return { + "Base64Signature": payload_body["spec"]["signature"]["content"], + "Payload": payload_b64, + "Cert": None, + "Chain": None, + "rekorBundle": bundle, + "RFC3161Timestamp": None, + } + + 
return [_to_cosign_signature(layer) for layer in signatures_manifest["layers"]] + + def get_file_hash(file: Optional[str] = None, content: Optional[bytes] = None) -> str: """Get the sha256 hash of a file or content""" if not file and not content: @@ -268,7 +307,9 @@ def store_signatures(signatures: list[Dict], image_hash: str, pubkey: str) -> No raise errors.InvalidSignatures("Signatures do not share the same image hash") if f"sha256:{image_hash}" != hashes[0]: - raise errors.SignatureMismatch("Signatures do not match the given image hash") + raise errors.SignatureMismatch( + f"Signatures do not match the given image hash ({image_hash}, {hashes[0]})" + ) pubkey_signatures = SIGNATURES_PATH / get_file_hash(pubkey) pubkey_signatures.mkdir(exist_ok=True)