Compare commits

...

72 commits

Author SHA1 Message Date
21e726e901
Merge 760948b5b5 into a6aa66f925 2025-02-25 17:20:31 +01:00
Alexis Métaireau
760948b5b5
Add tests for registry 2025-02-25 17:20:25 +01:00
Alexis Métaireau
3ea491761c fixup! Add a dangerzone-image CLI script 2025-02-25 17:20:12 +01:00
Alexis Métaireau
f175739b20 fixup! Add a dangerzone-image CLI script 2025-02-25 17:20:12 +01:00
Alexis Métaireau
3d579c8097 fixup! Add a dangerzone-image CLI script 2025-02-25 17:20:12 +01:00
Alexis Métaireau
356d848e47 fixup! Add a dangerzone-image CLI script 2025-02-25 17:20:12 +01:00
Alexis Métaireau
49c4cee898
make the signature tests pass 2025-02-25 15:44:46 +01:00
Alexis Métaireau
22d01a4045 fixup! c9c301d833 2025-02-25 15:44:24 +01:00
Alexis Métaireau
7e4cd66d2b fixup! b4818ce854 2025-02-25 15:44:24 +01:00
Alexis Métaireau
d93c99f8e2 fixup! b4818ce854 2025-02-25 15:44:24 +01:00
Alexis Métaireau
43f6d89bbb fixup! b37815a96c 2025-02-25 15:44:24 +01:00
Alexis Métaireau
bba427d619 fixup! 83418f09f2 2025-02-25 15:44:24 +01:00
Alexis Métaireau
4a4bf7c571 fixup! 3e861cc0cd 2025-02-25 15:44:24 +01:00
Alexis Métaireau
2476ed6daa fixup! Download and verify cosign signatures 2025-02-25 15:44:24 +01:00
Alexis Métaireau
30ec1f10e9 fixup! Download and verify cosign signatures 2025-02-25 15:44:24 +01:00
Alexis Métaireau
4073a62fd4 fixup! Download and verify cosign signatures 2025-02-25 15:44:24 +01:00
Alexis Métaireau
7f83505ae9 fixup! Download and verify cosign signatures 2025-02-25 15:44:24 +01:00
Alexis Métaireau
33ee158cf2 fixup! Download and verify cosign signatures 2025-02-25 15:44:24 +01:00
Alexis Métaireau
d5d3038bfa fixup! Download and verify cosign signatures 2025-02-25 15:44:24 +01:00
Alexis Métaireau
7e283196d8 fixup! 35704b8a18 2025-02-25 15:44:24 +01:00
Alexis Métaireau
8381b2fb7b fixup! (WIP) Add tests 2025-02-25 15:44:08 +01:00
Alexis Métaireau
7baddd0064 fixup! (WIP) Add tests 2025-02-25 15:44:08 +01:00
Alexis Métaireau
0c063b5b27 fixup! (WIP) Add tests 2025-02-25 15:44:08 +01:00
Alexis Métaireau
01f7b37151 fixup! (WIP) Add tests 2025-02-25 15:44:08 +01:00
Alexis Métaireau
9bf663fdb9 fixup! (WIP) Add tests 2025-02-25 15:44:08 +01:00
Alexis Métaireau
cf7a3dbb56 fixup! (WIP) Add tests 2025-02-25 15:44:08 +01:00
Alexis Métaireau
4621902a2b fixup! (WIP) Add tests 2025-02-25 15:44:08 +01:00
Alexis Métaireau
ec4028b486 fixup! (WIP) Add tests 2025-02-25 15:44:08 +01:00
Alexis Métaireau
43cb02bcca fixup! (WIP) Add tests 2025-02-25 15:44:08 +01:00
Alexis Métaireau
ab51a71bdf fixup! (WIP) Add tests 2025-02-25 15:44:08 +01:00
Alexis Métaireau
b5bfbb5d6e fixup! (WIP) Add tests 2025-02-25 15:44:08 +01:00
Alexis Métaireau
3e861cc0cd fixup! (WIP) Add tests 2025-02-25 15:44:08 +01:00
Alexis Métaireau
83418f09f2 fixup! (WIP) Add tests 2025-02-25 15:44:08 +01:00
Alexis Métaireau
fb89f00c73 fixup! (WIP) Add tests 2025-02-25 15:44:08 +01:00
Alexis Métaireau
ecb3d87b1f fixup! (WIP) Add tests 2025-02-25 15:44:08 +01:00
Alexis Métaireau
a4fa6aaed8 fixup! (WIP) Add tests 2025-02-25 15:44:08 +01:00
Alexis Métaireau
df3efa8157 fixup! 6aff845493 2025-02-25 15:44:08 +01:00
Alexis Métaireau
c9c301d833 fixup! (WIP) some more tests 2025-02-25 15:44:08 +01:00
Alexis Métaireau
b37815a96c fixup! (WIP) some more tests 2025-02-25 15:44:08 +01:00
Alexis Métaireau
35704b8a18 fixup! (WIP) some more tests 2025-02-25 15:44:08 +01:00
Alexis Métaireau
b4818ce854 fixup! (WIP) some more tests 2025-02-25 15:44:08 +01:00
Alexis Métaireau
0f2d81dbd6
(WIP) some more tests 2025-02-13 19:12:25 +01:00
Alexis Métaireau
a540fc5b08
(WIP) Add tests 2025-02-12 18:23:12 +01:00
Alexis Métaireau
835970b541 fixup! (WIP) Check for container updates rather than using image-id.txt 2025-02-12 12:05:20 +01:00
Alexis Métaireau
60674ea6b4 fixup! (WIP) Check for container updates rather than using image-id.txt 2025-02-12 11:53:36 +01:00
Alexis Métaireau
e078e9bb82 fixup! 1e9e468e37 2025-02-12 11:53:36 +01:00
Alexis Métaireau
5acb302acf fixup! Publish and attest multi-architecture container images 2025-02-12 11:40:36 +01:00
Alexis Métaireau
537d23e233 fixup! Publish and attest multi-architecture container images 2025-02-12 11:40:36 +01:00
Alexis Métaireau
0724f86b13 fixup! Publish and attest multi-architecture container images 2025-02-12 11:40:36 +01:00
Alexis Métaireau
668ee71895 fixup! Add a dangerzone-image CLI script 2025-02-12 11:40:36 +01:00
Alexis Métaireau
988971096c fixup! Add a dangerzone-image CLI script 2025-02-12 11:40:36 +01:00
Alexis Métaireau
5202d79270 fixup! Add a dangerzone-image CLI script 2025-02-12 11:40:36 +01:00
Alexis Métaireau
ccae6c5b16 fixup! Add a dangerzone-image CLI script 2025-02-12 11:40:36 +01:00
Alexis Métaireau
aac6c6334a fixup! Add a dangerzone-image CLI script 2025-02-12 11:40:36 +01:00
Alexis Métaireau
431f0cb803 fixup! Add a dangerzone-image CLI script 2025-02-12 11:40:36 +01:00
Alexis Métaireau
d667c284c7 fixup! Add a dangerzone-image CLI script 2025-02-12 11:40:36 +01:00
Alexis Métaireau
379c9f8f00 fixup! Add a dangerzone-image CLI script 2025-02-12 11:40:36 +01:00
Alexis Métaireau
1e9e468e37 fixup! Download and verify cosign signatures 2025-02-12 11:40:36 +01:00
Alexis Métaireau
5a4ddb17c9 fixup! Download and verify cosign signatures 2025-02-12 11:40:36 +01:00
Alexis Métaireau
22d235cabd fixup! Download and verify cosign signatures 2025-02-12 11:40:36 +01:00
Alexis Métaireau
5001328ae9 fixup! Download and verify cosign signatures 2025-02-12 11:40:36 +01:00
Alexis Métaireau
db33038c23 fixup! Download and verify cosign signatures 2025-02-12 11:40:36 +01:00
Alexis Métaireau
6aff845493 fixup! Download and verify cosign signatures 2025-02-12 11:40:36 +01:00
Alexis Métaireau
7002ab85a0 fixup! Download and verify cosign signatures 2025-02-12 11:40:36 +01:00
Alexis Métaireau
f6562ae59c fixup! Download and verify cosign signatures 2025-02-12 11:40:36 +01:00
Alexis Métaireau
27647cc309 fixup! Download and verify cosign signatures 2025-02-12 11:40:36 +01:00
Alexis Métaireau
5c9a38d370
(WIP) Check for container updates rather than using image-id.txt 2025-02-11 19:24:59 +01:00
Alexis Métaireau
af55d26c2e
Add documentation for independent container updates 2025-02-11 19:24:59 +01:00
Alex Pyrgiotis
f60c43f12b
Publish and attest multi-architecture container images
A new `dangerzone-image attest-provenance` script is now available,
making it possible to verify the attestations of an image published on
the GitHub container registry.

Container images are now built nightly and uploaded to the container
registry.
2025-02-11 19:24:59 +01:00
Alexis Métaireau
197325b266
Add the ability to download diffoci for multiple platforms
The hash list provided on the GitHub releases page is now bundled in the
`reproduce-image.py` script, and the proper hashes are checked after
download.
2025-02-11 19:24:59 +01:00
Alexis Métaireau
3d28ae2eee
Download and verify cosign signatures
Signatures are stored in the OCI Manifest v2 registry, and are
expected to follow the Cosign Signature Specification [0].

The following CLI utilities are provided with `dangerzone-image`:

For checking for new container images, and for upgrading and downloading them:

- `upgrade` upgrades the currently installed image to the latest one
  available on the OCI registry, downloading and storing the
  signatures in the process.
- `verify-local` verifies the currently installed image against the
  downloaded signatures and the public key.

To prepare and install archives on air-gapped environments:

- `prepare-archive` helps prepare an archive to install on another
  machine.
- `load-archive` helps upgrade the local image to the one contained in
  the given archive.

Signatures are stored locally using the format provided by `cosign
download signature`, and the Rekor log index is used to ensure that the
container image requested for installation is newer than the one already
present on the system.

[0] https://github.com/sigstore/cosign/blob/main/specs/SIGNATURE_SPEC.md
2025-02-11 19:09:53 +01:00
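
The log-index freshness rule described above is worth spelling out. Here is a simplified sketch of the logic that appears in `dangerzone/updater/signatures.py` further down in this diff (function names here are illustrative):

```python
# Simplified sketch of the Rekor log-index freshness check; the real
# implementation lives in dangerzone/updater/signatures.py below.
from functools import reduce
from typing import Dict, List


def highest_log_index(signatures: List[Dict]) -> int:
    """Take the highest Rekor log index among all signatures."""

    def _reducer(acc: int, sig: Dict) -> int:
        try:
            return max(acc, int(sig["Bundle"]["Payload"]["logIndex"]))
        except (KeyError, ValueError):
            return acc

    return reduce(_reducer, signatures, 0)


def is_fresh_enough(incoming: List[Dict], last_known_index: int) -> bool:
    # Refuse to install an image whose signatures predate the newest
    # signature we have already seen on this system.
    return highest_log_index(incoming) >= last_known_index
```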
Alexis Métaireau
81ee267591
Add a dangerzone-image CLI script
It contains utilities to interact with OCI registries, like getting the list of
published tags and getting the content of a manifest. It does so
using the Docker Registry API v2 [0].

The script has been added to the `dev_scripts`, and is also installed on
the system under `dangerzone-image`.

[0] https://docs.github.com/en/packages/working-with-a-github-packages-registry/working-with-the-container-registry
2025-02-11 18:13:39 +01:00
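
The Registry API v2 interaction described here boils down to two HTTP calls: fetching an anonymous pull token, then hitting the tags endpoint. A minimal sketch with `requests` (the full implementation is in `dangerzone/updater/registry.py` below):

```python
# Minimal sketch of the Docker Registry API v2 calls used by the
# dangerzone-image CLI: token-based pull auth, then tags/list.
import requests

registry, repo = "ghcr.io", "freedomofpress/dangerzone/dangerzone"

token = requests.get(
    f"https://{registry}/token",
    params={"service": registry, "scope": f"repository:{repo}:pull"},
).json()["token"]

tags = requests.get(
    f"https://{registry}/v2/{repo}/tags/list",
    headers={"Authorization": f"Bearer {token}"},
).json()["tags"]
print(tags)
```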
33 changed files with 2052 additions and 56 deletions

View file

@ -0,0 +1,168 @@
name: Release multi-arch container image

on:
  workflow_dispatch:
  push:
    branches:
      - main
      - "test/**"
  schedule:
    - cron: "0 0 * * *" # Run every day at 00:00 UTC.

env:
  REGISTRY: ghcr.io/${{ github.repository_owner }}
  REGISTRY_USER: ${{ github.actor }}
  REGISTRY_PASSWORD: ${{ github.token }}
  IMAGE_NAME: dangerzone/dangerzone

jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        platform:
          - linux/amd64
          - linux/arm64
    steps:
      - uses: actions/checkout@v4
      - name: Get current date
        id: date
        run: echo "date=$(date +'%Y%m%d')" >> $GITHUB_OUTPUT
      - name: Prepare
        run: |
          platform=${{ matrix.platform }}
          echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: |
            ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
      - name: Login to GHCR
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Build and push by digest
        id: build
        uses: docker/build-push-action@v6
        with:
          context: ./dangerzone/
          file: Dockerfile
          build-args: |
            DEBIAN_ARCHIVE_DATE=${{ steps.date.outputs.date }}
          ## Remove potentially incorrect Docker provenance.
          #provenance: false
          platforms: ${{ matrix.platform }}
          labels: ${{ steps.meta.outputs.labels }}
          outputs: type=image,"name=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}",push-by-digest=true,name-canonical=true,push=true
      - name: Export digest
        run: |
          mkdir -p ${{ runner.temp }}/digests
          digest="${{ steps.build.outputs.digest }}"
          touch "${{ runner.temp }}/digests/${digest#sha256:}"
      - name: Upload digest
        uses: actions/upload-artifact@v4
        with:
          name: digests-${{ env.PLATFORM_PAIR }}
          path: ${{ runner.temp }}/digests/*
          if-no-files-found: error
          retention-days: 1

  merge:
    runs-on: ubuntu-latest
    needs:
      - build
    outputs:
      digest: ${{ steps.image.outputs.digest }}
      image: ${{ steps.image.outputs.image }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
      - name: Compute image tag
        id: tag
        run: |
          DATE=$(date +'%Y%m%d')
          TAG=$(git describe --long --first-parent | tail -c +2)
          echo "tag=${DATE}-${TAG}" >> $GITHUB_OUTPUT
      - name: Download digests
        uses: actions/download-artifact@v4
        with:
          path: ${{ runner.temp }}/digests
          pattern: digests-*
          merge-multiple: true
      - name: Login to GHCR
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      #- name: Docker meta
      #  id: meta
      #  uses: docker/metadata-action@v5
      #  with:
      #    images: |
      #      ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
      #    tags: |
      #      type=ref,event=branch
      #      type=ref,event=pr
      #      type=semver,pattern={{version}}
      #      type=semver,pattern={{major}}.{{minor}}
      - name: Create manifest list and push
        working-directory: ${{ runner.temp }}/digests
        run: |
          IMAGE=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:${{ steps.tag.outputs.tag }}
          DIGESTS=$(printf '${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}@sha256:%s ' *)
          docker buildx imagetools create -t ${IMAGE} ${DIGESTS}
      - name: Inspect image
        id: image
        run: |
          # NOTE: Set the image as an output because the `env` context is not
          # available to the inputs of a reusable workflow call.
          image_name="${REGISTRY}/${IMAGE_NAME}"
          echo "image=$image_name" >> "$GITHUB_OUTPUT"
          docker buildx imagetools inspect ${image_name}:${{ steps.tag.outputs.tag }}
          digest=$(docker buildx imagetools inspect ${image_name}:${{ steps.tag.outputs.tag }} --format "{{json .Manifest}}" | jq -r '.digest')
          echo "digest=$digest" >> "$GITHUB_OUTPUT"

  # This step calls the container workflow to generate provenance and push it to
  # the container registry.
  provenance:
    needs:
      - merge
    permissions:
      actions: read # for detecting the Github Actions environment.
      id-token: write # for creating OIDC tokens for signing.
      packages: write # for uploading attestations.
    uses: slsa-framework/slsa-github-generator/.github/workflows/generator_container_slsa3.yml@v2.0.0
    with:
      digest: ${{ needs.merge.outputs.digest }}
      image: ${{ needs.merge.outputs.image }}
      registry-username: ${{ github.actor }}
    secrets:
      registry-password: ${{ secrets.GITHUB_TOKEN }}

View file

@ -3,23 +3,22 @@ import logging
 import platform
 import shutil
 import subprocess
-from typing import List, Tuple
+from typing import List, Optional, Tuple

 from . import errors
 from .util import get_resource_path, get_subprocess_startupinfo

-CONTAINER_NAME = "dangerzone.rocks/dangerzone"
+OLD_CONTAINER_NAME = "dangerzone.rocks/dangerzone"
+CONTAINER_NAME = "ghcr.io/freedomofpress/dangerzone/dangerzone"

 log = logging.getLogger(__name__)


 def get_runtime_name() -> str:
     if platform.system() == "Linux":
-        runtime_name = "podman"
-    else:
-        # Windows, Darwin, and unknown use docker for now, dangerzone-vm eventually
-        runtime_name = "docker"
-    return runtime_name
+        return "podman"
+    # Windows, Darwin, and unknown use docker for now, dangerzone-vm eventually
+    return "docker"


 def get_runtime_version() -> Tuple[int, int]:
@ -112,13 +111,7 @@ def delete_image_tag(tag: str) -> None:
 )


-def get_expected_tag() -> str:
-    """Get the tag of the Dangerzone image tarball from the image-id.txt file."""
-    with open(get_resource_path("image-id.txt")) as f:
-        return f.read().strip()


-def load_image_tarball() -> None:
+def load_image_tarball_from_gzip() -> None:
     log.info("Installing Dangerzone container image...")
     p = subprocess.Popen(
         [get_runtime(), "load"],
@ -147,3 +140,75 @@ def load_image_tarball() -> None:
    )

    log.info("Successfully installed container image from")


def load_image_tarball_from_tar(tarball_path: str) -> None:
    cmd = [get_runtime(), "load", "-i", tarball_path]
    subprocess.run(cmd, startupinfo=get_subprocess_startupinfo(), check=True)
    log.info("Successfully installed container image from %s", tarball_path)


def tag_image_by_digest(digest: str, tag: str) -> None:
    """Tag a container image by digest.

    The sha256: prefix should be omitted from the digest.
    """
    image_id = get_image_id_by_digest(digest)
    cmd = [get_runtime(), "tag", image_id, tag]
    log.debug(" ".join(cmd))
    subprocess.run(cmd, startupinfo=get_subprocess_startupinfo(), check=True)


def get_image_id_by_digest(digest: str) -> str:
    """Get an image ID from a digest.

    The sha256: prefix should be omitted from the digest.
    """
    cmd = [
        get_runtime(),
        "images",
        "-f",
        f"digest=sha256:{digest}",
        "--format",
        "{{.Id}}",
    ]
    log.debug(" ".join(cmd))
    process = subprocess.run(
        cmd, startupinfo=get_subprocess_startupinfo(), check=True, capture_output=True
    )
    # In case we have multiple lines, we only want the first one.
    return process.stdout.decode().strip().split("\n")[0]


def container_pull(image: str) -> bool:
    """Pull a container image from a registry."""
    cmd = [get_runtime_name(), "pull", f"{image}"]
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    process.communicate()
    return process.returncode == 0


def get_local_image_digest(image: str) -> str:
    """
    Returns an image hash from a local image name
    """
    # Get the image hash from the podman images command, as
    # podman inspect returns the digest of the architecture-bound image
    cmd = [get_runtime_name(), "images", image, "--format", "{{.Digest}}"]
    log.debug(" ".join(cmd))
    try:
        result = subprocess.run(cmd, capture_output=True, check=True)
        lines = result.stdout.decode().strip().split("\n")
        if len(lines) != 1:
            raise errors.MultipleImagesFoundException(
                f"Expected a single line of output, got {len(lines)} lines"
            )
        image_digest = lines[0].replace("sha256:", "")
        if not image_digest:
            raise errors.ImageNotPresentException(
                f"The image {image} does not exist locally"
            )
        return image_digest
    except subprocess.CalledProcessError as e:
        raise errors.ImageNotPresentException(
            f"The image {image} does not exist locally"
        )

View file

@ -126,6 +126,10 @@ class ImageNotPresentException(Exception):
    pass


class MultipleImagesFoundException(Exception):
    pass


class ImageInstallationException(Exception):
    pass

View file

@ -5,7 +5,7 @@ import shlex
 import subprocess
 from typing import List, Tuple

-from .. import container_utils, errors
+from .. import container_utils, errors, updater
 from ..document import Document
 from ..util import get_resource_path, get_subprocess_startupinfo
 from .base import IsolationProvider, terminate_process_group
@ -78,41 +78,24 @@ class Container(IsolationProvider):
     @staticmethod
     def install() -> bool:
-        """Install the container image tarball, or verify that it's already installed.
-
-        Perform the following actions:
-        1. Get the tags of any locally available images that match Dangerzone's image
-           name.
-        2. Get the expected image tag from the image-id.txt file.
-           - If this tag is present in the local images, then we can return.
-           - Else, prune the older container images and continue.
-        3. Load the image tarball and make sure it matches the expected tag.
-        """
-        old_tags = container_utils.list_image_tags()
-        expected_tag = container_utils.get_expected_tag()
-
-        if expected_tag not in old_tags:
-            # Prune older container images.
-            log.info(
-                f"Could not find a Dangerzone container image with tag '{expected_tag}'"
-            )
-            for tag in old_tags:
-                container_utils.delete_image_tag(tag)
-        else:
-            return True
-
-        # Load the image tarball into the container runtime.
-        container_utils.load_image_tarball()
-
-        # Check that the container image has the expected image tag.
-        # See https://github.com/freedomofpress/dangerzone/issues/988 for an example
-        # where this was not the case.
-        new_tags = container_utils.list_image_tags()
-        if expected_tag not in new_tags:
-            raise errors.ImageNotPresentException(
-                f"Could not find expected tag '{expected_tag}' after loading the"
-                " container image tarball"
-            )
+        """Check if an update is available and install it if necessary."""
+        # XXX Do this only if users have opted in to auto-updates
+        if False:  # Commented out for now, just as an example of how this can be implemented
+            # Load the image tarball into the container runtime.
+            update_available, image_digest = updater.is_update_available(
+                container_utils.CONTAINER_NAME
+            )
+            if update_available and image_digest:
+                updater.upgrade_container_image(
+                    container_utils.CONTAINER_NAME,
+                    image_digest,
+                    updater.DEFAULT_PUBKEY_LOCATION,
+                )
+
+        updater.verify_local_image(
+            container_utils.CONTAINER_NAME, updater.DEFAULT_PUBKEY_LOCATION
+        )

         return True
@ -193,6 +176,14 @@ class Container(IsolationProvider):
         name: str,
     ) -> subprocess.Popen:
         container_runtime = container_utils.get_runtime()
+        image_digest = container_utils.get_local_image_digest(
+            container_utils.CONTAINER_NAME
+        )
+        updater.verify_local_image(
+            container_utils.CONTAINER_NAME,
+            updater.DEFAULT_PUBKEY_LOCATION,
+        )
         security_args = self.get_runtime_security_args()
         debug_args = []
         if self.debug:
@ -201,9 +192,7 @@ class Container(IsolationProvider):
         enable_stdin = ["-i"]
         set_name = ["--name", name]
         prevent_leakage_args = ["--rm"]
-        image_name = [
-            container_utils.CONTAINER_NAME + ":" + container_utils.get_expected_tag()
-        ]
+        image_name = [container_utils.CONTAINER_NAME + "@sha256:" + image_digest]
         args = (
             ["run"]
             + security_args

View file

@ -0,0 +1,10 @@
import logging

log = logging.getLogger(__name__)

from .signatures import (
    DEFAULT_PUBKEY_LOCATION,
    is_update_available,
    upgrade_container_image,
    verify_local_image,
)

View file

@ -0,0 +1,90 @@
import subprocess
from tempfile import NamedTemporaryFile

from . import cosign

# NOTE: You can grab the SLSA attestation for an image/tag pair with the following
# commands:
#
# IMAGE=ghcr.io/apyrgio/dangerzone/dangerzone
# TAG=20250129-0.8.0-149-gbf2f5ac
# DIGEST=$(crane digest ${IMAGE?}:${TAG?})
# ATT_MANIFEST=${IMAGE?}:${DIGEST/:/-}.att
# ATT_BLOB=${IMAGE?}@$(crane manifest ${ATT_MANIFEST?} | jq -r '.layers[0].digest')
# crane blob ${ATT_BLOB?} | jq -r '.payload' | base64 -d | jq
CUE_POLICY = r"""
// The predicateType field must match this string
predicateType: "https://slsa.dev/provenance/v0.2"

predicate: {{
  // This condition verifies that the builder is the builder we
  // expect and trust. The following condition can be used
  // unmodified. It verifies that the builder is the container
  // workflow.
  builder: {{
    id: =~"^https://github.com/slsa-framework/slsa-github-generator/.github/workflows/generator_container_slsa3.yml@refs/tags/v[0-9]+.[0-9]+.[0-9]+$"
  }}
  invocation: {{
    configSource: {{
      // This condition verifies the entrypoint of the workflow.
      // Replace with the relative path to your workflow in your
      // repository.
      entryPoint: "{workflow}"

      // This condition verifies that the image was generated from
      // the source repository we expect. Replace this with your
      // repository.
      uri: =~"^git\\+https://github.com/{repository}@refs/heads/{branch}"
      // Add a condition to check for a specific commit hash
      digest: {{
        sha1: "{commit}"
      }}
    }}
  }}
}}
"""


def verify(
    image_name: str,
    branch: str,
    commit: str,
    repository: str,
    workflow: str,
) -> bool:
    """
    Look up the image attestation to see if the image has been built
    on GitHub runners, and from a given repository.
    """
    cosign.ensure_installed()
    policy = CUE_POLICY.format(
        repository=repository, workflow=workflow, commit=commit, branch=branch
    )

    # Put the value in files and verify with cosign
    with (
        NamedTemporaryFile(mode="w", suffix=".cue") as policy_f,
    ):
        policy_f.write(policy)
        policy_f.flush()

        # Call cosign with the temporary file paths
        cmd = [
            "cosign",
            "verify-attestation",
            "--type",
            "slsaprovenance",
            "--policy",
            policy_f.name,
            "--certificate-oidc-issuer",
            "https://token.actions.githubusercontent.com",
            "--certificate-identity-regexp",
            "^https://github.com/slsa-framework/slsa-github-generator/.github/workflows/generator_container_slsa3.yml@refs/tags/v[0-9]+.[0-9]+.[0-9]+$",
            image_name,
        ]

        result = subprocess.run(cmd, capture_output=True)
        if result.returncode != 0:
            error = result.stderr.decode()
            raise Exception(f"Attestation cannot be verified. {error}")
        return True
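
A hypothetical invocation of this module from the source tree; the image tag and commit hash below are placeholders, not real values:

```python
# Usage sketch for the module above; <tag> and <full-commit-sha> are placeholders.
from dangerzone.updater import attestations

verified = attestations.verify(
    image_name="ghcr.io/freedomofpress/dangerzone/dangerzone:<tag>",
    branch="main",
    commit="<full-commit-sha>",
    repository="freedomofpress/dangerzone",
    workflow=".github/workflows/release-container-image.yml",
)
if verified:
    print("SLSA provenance verified")
```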

154
dangerzone/updater/cli.py Normal file
View file

@ -0,0 +1,154 @@
#!/usr/bin/python

import logging

import click

from . import attestations, errors, log, registry, signatures

DEFAULT_REPOSITORY = "freedomofpress/dangerzone"
DEFAULT_BRANCH = "main"
DEFAULT_IMAGE_NAME = "ghcr.io/freedomofpress/dangerzone/dangerzone"


@click.group()
@click.option("--debug", is_flag=True)
def main(debug: bool) -> None:
    if debug:
        click.echo("Debug mode enabled")
        level = logging.DEBUG
    else:
        level = logging.INFO
    logging.basicConfig(level=level)


@main.command()
@click.argument("image", default=DEFAULT_IMAGE_NAME)
@click.option("--pubkey", default=signatures.DEFAULT_PUBKEY_LOCATION)
def upgrade(image: str, pubkey: str) -> None:
    """Upgrade the image to the latest signed version."""
    manifest_digest = registry.get_manifest_digest(image)
    try:
        is_upgraded = signatures.upgrade_container_image(image, manifest_digest, pubkey)
        if is_upgraded:
            click.echo(f"✅ The local image {image} has been upgraded")
            click.echo(f"✅ The image has been signed with {pubkey}")
            click.echo("✅ Signatures have been verified and stored locally")
    except errors.ImageAlreadyUpToDate as e:
        click.echo(f"{e}")
        raise click.Abort()


@main.command()
@click.argument("image_filename")
@click.option("--pubkey", default=signatures.DEFAULT_PUBKEY_LOCATION)
def load_archive(image_filename: str, pubkey: str) -> None:
    """Upgrade the local image to the one in the archive."""
    try:
        loaded_image = signatures.upgrade_container_image_airgapped(
            image_filename, pubkey
        )
        click.echo(
            f"✅ Installed image {image_filename} on the system as {loaded_image}"
        )
    except errors.ImageAlreadyUpToDate as e:
        click.echo(f"{e}")
        raise click.Abort()


@main.command()
@click.argument("image")
@click.option("--output", default="dangerzone-airgapped.tar")
def prepare_archive(image: str, output: str) -> None:
    """Prepare an archive to upgrade the dangerzone image on an airgapped environment."""
    signatures.prepare_airgapped_archive(image, output)
    click.echo(f"✅ Archive {output} created")


@main.command()
@click.argument("image", default=DEFAULT_IMAGE_NAME)
@click.option("--pubkey", default=signatures.DEFAULT_PUBKEY_LOCATION)
def verify_local(image: str, pubkey: str) -> None:
    """
    Verify the local image signature against a public key and the stored signatures.
    """
    # XXX remove a potential :tag
    if signatures.verify_local_image(image, pubkey):
        click.echo(
            (
                f"Verifying the local image:\n\n"
                f"pubkey: {pubkey}\n"
                f"image: {image}\n\n"
                f"✅ The local image {image} has been signed with {pubkey}"
            )
        )


@main.command()
@click.argument("image")
def list_remote_tags(image: str) -> None:
    """List the tags available for a given image."""
    click.echo(f"Existing tags for {image}")
    for tag in registry.list_tags(image):
        click.echo(tag)


@main.command()
@click.argument("image")
def get_manifest(image: str) -> None:
    """Retrieve a remote manifest for a given image and display it."""
    click.echo(registry.get_manifest(image).content)


@main.command()
@click.argument("image_name")
# XXX: Do we really want to check against this?
@click.option(
    "--branch",
    default=DEFAULT_BRANCH,
    help="The Git branch that the image was built from",
)
@click.option(
    "--commit",
    required=True,
    help="The Git commit the image was built from",
)
@click.option(
    "--repository",
    default=DEFAULT_REPOSITORY,
    help="The GitHub repository to check the attestation for",
)
@click.option(
    "--workflow",
    default=".github/workflows/release-container-image.yml",
    help="The path of the GitHub actions workflow this image was created from",
)
def attest_provenance(
    image_name: str,
    branch: str,
    commit: str,
    repository: str,
    workflow: str,
) -> None:
    """
    Look up the image attestation to see if the image has been built
    on GitHub runners, and from a given repository.
    """
    # TODO: Parse image and make sure it has a tag. Might even check for a digest.
    # parsed = registry.parse_image_location(image)

    verified = attestations.verify(image_name, branch, commit, repository, workflow)
    if verified:
        click.echo(
            f"🎉 Successfully verified image '{image_name}' and its associated claims:"
        )
        click.echo("- ✅ SLSA Level 3 provenance")
        click.echo(f"- ✅ GitHub repo: {repository}")
        click.echo(f"- ✅ GitHub actions workflow: {workflow}")
        click.echo(f"- ✅ Git branch: {branch}")
        click.echo(f"- ✅ Git commit: {commit}")


if __name__ == "__main__":
    main()

View file

@ -0,0 +1,32 @@
import subprocess

from . import errors, log


def ensure_installed() -> None:
    try:
        subprocess.run(["cosign", "version"], capture_output=True, check=True)
    except (subprocess.CalledProcessError, FileNotFoundError):
        # FileNotFoundError is raised when the cosign binary is missing entirely.
        raise errors.CosignNotInstalledError()


def verify_local_image(oci_image_folder: str, pubkey: str) -> bool:
    """Verify the given path against the given public key"""
    ensure_installed()
    cmd = [
        "cosign",
        "verify",
        "--key",
        pubkey,
        "--offline",
        "--local-image",
        oci_image_folder,
    ]
    log.debug(" ".join(cmd))
    result = subprocess.run(cmd, capture_output=True)
    if result.returncode == 0:
        log.info("Signature verified")
        return True
    log.info("Failed to verify signature: %s", result.stderr)
    return False

View file

@ -0,0 +1,58 @@
class UpdaterError(Exception):
    pass


class ImageAlreadyUpToDate(UpdaterError):
    pass


class ImageNotFound(UpdaterError):
    pass


class SignatureError(UpdaterError):
    pass


class RegistryError(UpdaterError):
    pass


class AirgappedImageDownloadError(UpdaterError):
    pass


class NoRemoteSignatures(SignatureError):
    pass


class SignatureVerificationError(SignatureError):
    pass


class SignatureExtractionError(SignatureError):
    pass


class SignaturesFolderDoesNotExist(SignatureError):
    pass


class InvalidSignatures(SignatureError):
    pass


class SignatureMismatch(SignatureError):
    pass


class LocalSignatureNotFound(SignatureError):
    pass


class CosignNotInstalledError(SignatureError):
    pass


class InvalidLogIndex(SignatureError):
    pass

View file

@ -0,0 +1,116 @@
import re
from collections import namedtuple
from hashlib import sha256
from typing import Dict, Optional, Tuple

import requests

from . import errors, log

__all__ = [
    "get_manifest_digest",
    "list_tags",
    "get_manifest",
    "parse_image_location",
]

SIGSTORE_BUNDLE = "application/vnd.dev.sigstore.bundle.v0.3+json"
IMAGE_INDEX_MEDIA_TYPE = "application/vnd.oci.image.index.v1+json"

ACCEPT_MANIFESTS_HEADER = ",".join(
    [
        "application/vnd.docker.distribution.manifest.v1+json",
        "application/vnd.docker.distribution.manifest.v1+prettyjws",
        "application/vnd.docker.distribution.manifest.v2+json",
        "application/vnd.oci.image.manifest.v1+json",
        "application/vnd.docker.distribution.manifest.list.v2+json",
        IMAGE_INDEX_MEDIA_TYPE,
    ]
)


Image = namedtuple("Image", ["registry", "namespace", "image_name", "tag", "digest"])


def parse_image_location(input_string: str) -> Image:
    """Parses container image location into an Image namedtuple"""
    pattern = (
        r"^"
        r"(?P<registry>[a-zA-Z0-9.-]+)/"
        r"(?P<namespace>[a-zA-Z0-9-]+)/"
        r"(?P<image_name>[^:@]+)"
        r"(?::(?P<tag>[a-zA-Z0-9.-]+))?"
        r"(?:@(?P<digest>sha256:[a-zA-Z0-9]+))?"
        r"$"
    )
    match = re.match(pattern, input_string)
    if not match:
        raise ValueError("Malformed image location")
    return Image(
        registry=match.group("registry"),
        namespace=match.group("namespace"),
        image_name=match.group("image_name"),
        tag=match.group("tag") or "latest",
        digest=match.group("digest"),
    )


def _get_auth_header(image: Image) -> Dict[str, str]:
    auth_url = f"https://{image.registry}/token"
    response = requests.get(
        auth_url,
        params={
            "service": f"{image.registry}",
            "scope": f"repository:{image.namespace}/{image.image_name}:pull",
        },
    )
    response.raise_for_status()
    token = response.json()["token"]
    return {"Authorization": f"Bearer {token}"}


def _url(image: Image) -> str:
    return f"https://{image.registry}/v2/{image.namespace}/{image.image_name}"


def list_tags(image_str: str) -> list:
    image = parse_image_location(image_str)
    url = f"{_url(image)}/tags/list"
    response = requests.get(url, headers=_get_auth_header(image))
    response.raise_for_status()
    tags = response.json().get("tags", [])
    return tags


def get_manifest(image_str: str) -> requests.Response:
    """Get manifest information for a specific tag"""
    image = parse_image_location(image_str)
    manifest_url = f"{_url(image)}/manifests/{image.tag}"
    headers = {
        "Accept": ACCEPT_MANIFESTS_HEADER,
    }
    headers.update(_get_auth_header(image))
    response = requests.get(manifest_url, headers=headers)
    response.raise_for_status()
    return response


def list_manifests(image_str: str) -> list:
    return get_manifest(image_str).json().get("manifests")


def get_blob(image: Image, digest: str) -> requests.Response:
    response = requests.get(
        f"{_url(image)}/blobs/{digest}", headers=_get_auth_header(image)
    )
    response.raise_for_status()
    return response


def get_manifest_digest(
    image_str: str, tag_manifest_content: Optional[bytes] = None
) -> str:
    if not tag_manifest_content:
        tag_manifest_content = get_manifest(image_str).content

    return sha256(tag_manifest_content).hexdigest()
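
A short usage sketch of the helpers above (assumes network access to ghcr.io):

```python
# Usage sketch for dangerzone/updater/registry.py; not part of the diff.
from dangerzone.updater import registry

image = registry.parse_image_location(
    "ghcr.io/freedomofpress/dangerzone/dangerzone:latest"
)
print(image.registry, image.namespace, image.image_name, image.tag)

# The manifest digest is simply the sha256 of the raw manifest bytes.
digest = registry.get_manifest_digest(
    "ghcr.io/freedomofpress/dangerzone/dangerzone:latest"
)
print(f"sha256:{digest}")
```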

View file

@ -0,0 +1,481 @@
import json
import platform
import re
import subprocess
import tarfile
from base64 import b64decode, b64encode
from functools import reduce
from hashlib import sha256
from io import BytesIO
from pathlib import Path
from tempfile import NamedTemporaryFile, TemporaryDirectory
from typing import Dict, List, Optional, Tuple

from .. import container_utils as runtime
from .. import errors as dzerrors
from ..util import get_resource_path
from . import cosign, errors, log, registry

try:
    import platformdirs
except ImportError:
    import appdirs as platformdirs  # type: ignore[no-redef]


def get_config_dir() -> Path:
    return Path(platformdirs.user_config_dir("dangerzone"))


# XXX Store this somewhere else.
DEFAULT_PUBKEY_LOCATION = get_resource_path("freedomofpress-dangerzone-pub.key")
SIGNATURES_PATH = get_config_dir() / "signatures"
LAST_LOG_INDEX = SIGNATURES_PATH / "last_log_index"

__all__ = [
    "verify_signature",
    "load_and_verify_signatures",
    "store_signatures",
    "verify_offline_image_signature",
]


def signature_to_bundle(sig: Dict) -> Dict:
    """Convert a cosign-download signature to the format expected by cosign bundle."""
    bundle = sig["Bundle"]
    payload = bundle["Payload"]
    return {
        "base64Signature": sig["Base64Signature"],
        "Payload": sig["Payload"],
        "cert": sig["Cert"],
        "chain": sig["Chain"],
        "rekorBundle": {
            "SignedEntryTimestamp": bundle["SignedEntryTimestamp"],
            "Payload": {
                "body": payload["body"],
                "integratedTime": payload["integratedTime"],
                "logIndex": payload["logIndex"],
                "logID": payload["logID"],
            },
        },
        "RFC3161Timestamp": sig["RFC3161Timestamp"],
    }


def verify_signature(signature: dict, image_digest: str, pubkey: str | Path) -> bool:
    """Verify a signature against a given public key"""
    # XXX - Also verify the identity/docker-reference field against the expected value
    # e.g. ghcr.io/freedomofpress/dangerzone/dangerzone
    cosign.ensure_installed()
    signature_bundle = signature_to_bundle(signature)
    try:
        payload_bytes = b64decode(signature_bundle["Payload"])
        payload_digest = json.loads(payload_bytes)["critical"]["image"][
            "docker-manifest-digest"
        ]
    except Exception as e:
        raise errors.SignatureVerificationError(
            f"Unable to extract the payload digest from the signature: {e}"
        )
    if payload_digest != f"sha256:{image_digest}":
        raise errors.SignatureMismatch(
            f"The signature does not match the image digest ({payload_digest}, {image_digest})"
        )

    with (
        NamedTemporaryFile(mode="w") as signature_file,
        NamedTemporaryFile(mode="bw") as payload_file,
    ):
        json.dump(signature_bundle, signature_file)
        signature_file.flush()

        payload_file.write(payload_bytes)
        payload_file.flush()

        if isinstance(pubkey, str):
            pubkey = Path(pubkey)

        cmd = [
            "cosign",
            "verify-blob",
            "--key",
            str(pubkey.absolute()),
            "--bundle",
            signature_file.name,
            payload_file.name,
        ]
        log.debug(" ".join(cmd))
        result = subprocess.run(cmd, capture_output=True)
        if result.returncode != 0:
            # XXX Raise instead?
            log.debug("Failed to verify signature: %s", result.stderr)
            raise errors.SignatureVerificationError("Failed to verify signature")
        if result.stderr == b"Verified OK\n":
            log.debug("Signature verified")
            return True
    return False


class Signature:
    def __init__(self, signature: Dict):
        self.signature = signature

    @property
    def payload(self) -> Dict:
        return json.loads(b64decode(self.signature["Payload"]))

    @property
    def manifest_digest(self) -> str:
        full_digest = self.payload["critical"]["image"]["docker-manifest-digest"]
        return full_digest.replace("sha256:", "")


def is_update_available(image: str) -> Tuple[bool, Optional[str]]:
    remote_digest = registry.get_manifest_digest(image)
    try:
        local_digest = runtime.get_local_image_digest(image)
    except dzerrors.ImageNotPresentException:
        log.debug("No local image found")
        return True, remote_digest
    log.debug("Remote digest: %s", remote_digest)
    log.debug("Local digest: %s", local_digest)
    has_update = remote_digest != local_digest
    if has_update:
        return True, remote_digest
    return False, None


def verify_signatures(
    signatures: List[Dict],
    image_digest: str,
    pubkey: str,
) -> bool:
    if len(signatures) < 1:
        raise errors.SignatureVerificationError("No signatures found")

    for signature in signatures:
        if not verify_signature(signature, image_digest, pubkey):
            msg = f"Unable to verify signature for {image_digest} with pubkey {pubkey}"
            raise errors.SignatureVerificationError(msg)

    return True


def get_last_log_index() -> int:
    SIGNATURES_PATH.mkdir(parents=True, exist_ok=True)
    if not LAST_LOG_INDEX.exists():
        return 0

    with open(LAST_LOG_INDEX) as f:
        return int(f.read())


def get_log_index_from_signatures(signatures: List[Dict]) -> int:
    def _reducer(accumulator: int, signature: Dict) -> int:
        try:
            logIndex = int(signature["Bundle"]["Payload"]["logIndex"])
        except (KeyError, ValueError):
            return accumulator
        return max(accumulator, logIndex)

    return reduce(_reducer, signatures, 0)


def write_log_index(log_index: int) -> None:
    last_log_index_path = SIGNATURES_PATH / "last_log_index"

    with open(last_log_index_path, "w") as f:
        f.write(str(log_index))


def _get_blob(tmpdir: str, digest: str) -> Path:
    return Path(tmpdir) / "blobs" / "sha256" / digest.replace("sha256:", "")


def upgrade_container_image_airgapped(container_tar: str, pubkey: str) -> str:
    """
    Verify the given archive against its self-contained signatures, then
    upgrade the image and retag it to the expected tag.

    Right now, the archive is extracted and reconstructed, requiring some space
    on the filesystem.

    :return: The loaded image name
    """
    # XXX Use a memory buffer instead of the filesystem
    with TemporaryDirectory() as tmpdir:

        def _get_signature_filename(manifests: List[Dict]) -> Path:
            for manifest in manifests:
                if (
                    manifest["annotations"].get("kind")
                    == "dev.cosignproject.cosign/sigs"
                ):
                    return _get_blob(tmpdir, manifest["digest"])
            raise errors.SignatureExtractionError()

        with tarfile.open(container_tar, "r") as archive:
            archive.extractall(tmpdir)

        if not cosign.verify_local_image(tmpdir, pubkey):
            raise errors.SignatureVerificationError()

        # Remove the signatures from the archive, otherwise podman is not able to load it
        with open(Path(tmpdir) / "index.json") as f:
            index_json = json.load(f)

        signature_filename = _get_signature_filename(index_json["manifests"])

        index_json["manifests"] = [
            manifest
            for manifest in index_json["manifests"]
            if manifest["annotations"].get("kind")
            in ("dev.cosignproject.cosign/imageIndex", "dev.cosignproject.cosign/image")
        ]

        with open(signature_filename, "r") as f:
            image_name, signatures = convert_oci_images_signatures(json.load(f), tmpdir)
            log.info(f"Found image name: {image_name}")

        # Ensure that we only upgrade if the log index is higher than the last known one
        incoming_log_index = get_log_index_from_signatures(signatures)
        last_log_index = get_last_log_index()

        if incoming_log_index < last_log_index:
            raise errors.InvalidLogIndex(
                "The log index is not higher than the last known one"
            )

        image_digest = index_json["manifests"][0].get("digest").replace("sha256:", "")

        # Write the new index.json to the temp folder
        with open(Path(tmpdir) / "index.json", "w") as f:
            json.dump(index_json, f)

        with NamedTemporaryFile(suffix=".tar") as temporary_tar:
            with tarfile.open(temporary_tar.name, "w") as archive:
                # The root is the tmpdir
                archive.add(Path(tmpdir) / "index.json", arcname="index.json")
                archive.add(Path(tmpdir) / "oci-layout", arcname="oci-layout")
                archive.add(Path(tmpdir) / "blobs", arcname="blobs")

            runtime.load_image_tarball_from_tar(temporary_tar.name)
            runtime.tag_image_by_digest(image_digest, image_name)

    store_signatures(signatures, image_digest, pubkey)
    return image_name


def convert_oci_images_signatures(
    signatures_manifest: Dict, tmpdir: str
) -> Tuple[str, List[Dict]]:
    def _to_cosign_signature(layer: Dict) -> Dict:
        signature = layer["annotations"]["dev.cosignproject.cosign/signature"]
        bundle = json.loads(layer["annotations"]["dev.sigstore.cosign/bundle"])
        payload_body = json.loads(b64decode(bundle["Payload"]["body"]))

        payload_location = _get_blob(tmpdir, layer["digest"])
        with open(payload_location, "rb") as f:
            payload_b64 = b64encode(f.read()).decode()

        return {
            "Base64Signature": payload_body["spec"]["signature"]["content"],
            "Payload": payload_b64,
            "Cert": None,
            "Chain": None,
            "Bundle": bundle,
            "RFC3161Timestamp": None,
        }

    layers = signatures_manifest.get("layers", [])
    signatures = [_to_cosign_signature(layer) for layer in layers]

    if not signatures:
        raise errors.SignatureExtractionError()

    payload_location = _get_blob(tmpdir, layers[0]["digest"])
    with open(payload_location, "r") as f:
        payload = json.load(f)
        image_name = payload["critical"]["identity"]["docker-reference"]

    return image_name, signatures


def get_file_digest(file: Optional[str] = None, content: Optional[bytes] = None) -> str:
    """Get the sha256 digest of a file or content"""
    if not file and not content:
        raise errors.UpdaterError("No file or content provided")
    if file:
        with open(file, "rb") as f:
            content = f.read()
    if content:
        return sha256(content).hexdigest()
    return ""


def load_and_verify_signatures(
    image_digest: str,
    pubkey: str,
    bypass_verification: bool = False,
    signatures_path: Optional[Path] = None,
) -> List[Dict]:
    """
    Load signatures from the local filesystem

    See store_signatures() for the expected format.
    """
    if not signatures_path:
        signatures_path = SIGNATURES_PATH

    pubkey_signatures = signatures_path / get_file_digest(pubkey)
    if not pubkey_signatures.exists():
        msg = (
            f"Cannot find a '{pubkey_signatures}' folder. "
            "You might need to download the image signatures first."
        )
        raise errors.SignaturesFolderDoesNotExist(msg)

    with open(pubkey_signatures / f"{image_digest}.json") as f:
        log.debug("Loading signatures from %s", f.name)
        signatures = json.load(f)

    if not bypass_verification:
        verify_signatures(signatures, image_digest, pubkey)

    return signatures


def store_signatures(signatures: list[Dict], image_digest: str, pubkey: str) -> None:
    """
    Store signatures locally in the SIGNATURES_PATH folder, like this:

    ~/.config/dangerzone/signatures/
    ├── <pubkey-digest>/
    │   ├── <image-digest>.json
    │   └── <image-digest>.json
    └── last_log_index

    The last_log_index file is used to keep track of the last log index
    processed by the updater.

    The format used in the `.json` file is the one of `cosign download
    signature`, which differs from the "bundle" one used afterwards.

    It can be converted to the one expected by cosign verify --bundle with
    the `signature_to_bundle()` function.
    """

    def _get_digest(sig: Dict) -> str:
        payload = json.loads(b64decode(sig["Payload"]))
        return payload["critical"]["image"]["docker-manifest-digest"]

    # All the signatures should share the same digest.
    digests = list(map(_get_digest, signatures))
    if len(set(digests)) != 1:
        raise errors.InvalidSignatures("Signatures do not share the same image digest")

    if f"sha256:{image_digest}" != digests[0]:
        raise errors.SignatureMismatch(
            f"Signatures do not match the given image digest (sha256:{image_digest}, {digests[0]})"
        )

    pubkey_signatures = SIGNATURES_PATH / get_file_digest(pubkey)
    pubkey_signatures.mkdir(parents=True, exist_ok=True)

    with open(pubkey_signatures / f"{image_digest}.json", "w") as f:
        log.info(
            f"Storing signatures for {image_digest} in {pubkey_signatures}/{image_digest}.json"
        )
        json.dump(signatures, f)

    write_log_index(get_log_index_from_signatures(signatures))


def verify_local_image(image: str, pubkey: str) -> bool:
    """
    Verifies that a local image has a valid signature
    """
    log.info(f"Verifying local image {image} against pubkey {pubkey}")
    try:
        image_digest = runtime.get_local_image_digest(image)
    except subprocess.CalledProcessError:
        raise errors.ImageNotFound(f"The image {image} does not exist locally")

    log.debug(f"Image digest: {image_digest}")
    load_and_verify_signatures(image_digest, pubkey)
    return True


def get_remote_signatures(image: str, digest: str) -> List[Dict]:
    """Retrieve the signatures from the registry, via `cosign download signature`."""
    cosign.ensure_installed()

    try:
        process = subprocess.run(
            ["cosign", "download", "signature", f"{image}@sha256:{digest}"],
            capture_output=True,
            check=True,
        )
    except subprocess.CalledProcessError as e:
        raise errors.NoRemoteSignatures(e)

    # Remove the last return, split on newlines, convert from JSON
    signatures_raw = process.stdout.decode("utf-8").strip().split("\n")
    signatures = list(filter(bool, map(json.loads, signatures_raw)))
    if len(signatures) < 1:
        raise errors.NoRemoteSignatures("No signatures found for the image")
    return signatures


def prepare_airgapped_archive(image_name: str, destination: str) -> None:
    if "@sha256:" not in image_name:
        raise errors.AirgappedImageDownloadError(
            "The image name must include a digest, e.g. ghcr.io/freedomofpress/dangerzone/dangerzone@sha256:123456"
        )

    cosign.ensure_installed()

    # Get the image from the registry
    with TemporaryDirectory() as tmpdir:
        msg = f"Downloading image {image_name}.\nIt might take a while."
        log.info(msg)

        process = subprocess.run(
            ["cosign", "save", image_name, "--dir", tmpdir],
            capture_output=True,
            check=True,
        )
        if process.returncode != 0:
            raise errors.AirgappedImageDownloadError()

        with tarfile.open(destination, "w") as archive:
            archive.add(tmpdir, arcname=".")


def upgrade_container_image(image: str, manifest_digest: str, pubkey: str) -> bool:
    """Verify and upgrade the image to the latest, if signed."""
    update_available, _ = is_update_available(image)
    if not update_available:
        raise errors.ImageAlreadyUpToDate("The image is already up to date")

    signatures = get_remote_signatures(image, manifest_digest)
    verify_signatures(signatures, manifest_digest, pubkey)

    # Ensure that we only upgrade if the log index is higher than the last known one
    incoming_log_index = get_log_index_from_signatures(signatures)
    last_log_index = get_last_log_index()

    if incoming_log_index < last_log_index:
        raise errors.InvalidLogIndex(
            "The log index is not higher than the last known one"
        )

    # let's upgrade the image
    # XXX Use the image digest here to avoid race conditions
    upgraded = runtime.container_pull(image)

    # At this point, the signatures are verified
    # We store the signatures just now to avoid storing unverified signatures
    store_signatures(signatures, manifest_digest, pubkey)
    return upgraded
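
Condensed, the online update flow implemented above amounts to the following sketch (error handling omitted):

```python
# Condensed sketch of the update flow implemented in this module.
from dangerzone.updater import signatures

image = "ghcr.io/freedomofpress/dangerzone/dangerzone"
pubkey = signatures.DEFAULT_PUBKEY_LOCATION

update_available, remote_digest = signatures.is_update_available(image)
if update_available:
    # Verifies the remote signatures, checks the Rekor log index, pulls the
    # image, and only then stores the (now verified) signatures locally.
    signatures.upgrade_container_image(image, remote_digest, pubkey)

# Independently, verify whatever is installed locally against the stored
# signatures and the public key.
signatures.verify_local_image(image, pubkey)
```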

View file

@ -8,7 +8,7 @@ import unicodedata
 try:
     import platformdirs
 except ImportError:
-    import appdirs as platformdirs
+    import appdirs as platformdirs  # type: ignore[no-redef]


 def get_config_dir() -> str:

13
dev_scripts/dangerzone-image Executable file
View file

@ -0,0 +1,13 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import os
import sys

# Load dangerzone module and resources from the source code tree
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.dangerzone_dev = True

from dangerzone.updater import cli

cli.main()

View file

@ -4,6 +4,7 @@ import argparse
 import hashlib
 import logging
 import pathlib
+import platform
 import stat
 import subprocess
 import sys
@ -11,8 +12,20 @@ import urllib.request
 logger = logging.getLogger(__name__)

-DIFFOCI_URL = "https://github.com/reproducible-containers/diffoci/releases/download/v0.1.5/diffoci-v0.1.5.linux-amd64"
-DIFFOCI_CHECKSUM = "01d25fe690196945a6bd510d30559338aa489c034d3a1b895a0d82a4b860698f"
+DIFFOCI_VERSION = "v0.1.5"
+# https://github.com/reproducible-containers/diffoci/releases/download/v0.1.5/SHA256SUMS
+DIFFOCI_CHECKSUMS = """
+ae171821b18c3b9e5cd1953323e79fe5ec1e972e9586474b18227b2cd052e695 diffoci-v0.1.5.darwin-amd64
+fadabdac9be45fb3dfe2a53986422e53dcc6e1fdc8062713c5760e8959a37c2b diffoci-v0.1.5.darwin-arm64
+01d25fe690196945a6bd510d30559338aa489c034d3a1b895a0d82a4b860698f diffoci-v0.1.5.linux-amd64
+5cbc5d13b51183e2988ee0f406d428eb846d51b7c2c12ae17d0775371f43103e diffoci-v0.1.5.linux-arm-v7
+2d067bd1af8a26b2c206c6bf2bde9bcb21062ddb5dc575e110e0e1a93d0d065f diffoci-v0.1.5.linux-arm64
+0923f0c01f270c596fea9f84e529af958d6caba3fa0f6bf4f03df2a12f23b3fc diffoci-v0.1.5.linux-ppc64le
+5821cbc299a90caa167c3a91465292907077ca1123375f88165a842b8970e710 diffoci-v0.1.5.linux-riscv64
+917d7f23d2bd8fcc755cb2f722fc50ffd83389e04838c3b6e9c3463ea96a9be1 diffoci-v0.1.5.linux-s390x
+"""
+DIFFOCI_URL = "https://github.com/reproducible-containers/diffoci/releases/download/{version}/diffoci-{version}.{arch}"

 DIFFOCI_PATH = (
     pathlib.Path.home() / ".local" / "share" / "dangerzone-dev" / "helpers" / "diffoci"
 )
@ -44,12 +57,31 @@ def git_verify(commit, source):
     )


+def get_platform_arch():
+    system = platform.system().lower()
+    arch = platform.machine().lower()
+    if arch == "x86_64":
+        arch = "amd64"
+    return f"{system}-{arch}"
+
+
+def parse_checksums():
+    lines = [
+        line.replace(f"diffoci-{DIFFOCI_VERSION}.", "").split(" ")
+        for line in DIFFOCI_CHECKSUMS.split("\n")
+        if line
+    ]
+    return {arch: checksum for checksum, arch in lines}
+
+
 def diffoci_hash_matches(diffoci):
     """Check if the hash of the downloaded diffoci bin matches the expected one."""
+    arch = get_platform_arch()
+    expected_checksum = parse_checksums().get(arch)
     m = hashlib.sha256()
     m.update(diffoci)
     diffoci_checksum = m.hexdigest()
-    return diffoci_checksum == DIFFOCI_CHECKSUM
+    return diffoci_checksum == expected_checksum
def diffoci_is_installed():
@ -66,7 +98,9 @@ def diffoci_is_installed():
 def diffoci_download():
     """Download the diffoci tool, based on a URL and its checksum."""
-    with urllib.request.urlopen(DIFFOCI_URL) as f:
+    download_url = DIFFOCI_URL.format(version=DIFFOCI_VERSION, arch=get_platform_arch())
+    logger.info(f"Downloading diffoci helper from {download_url}")
+    with urllib.request.urlopen(download_url) as f:
         diffoci_bin = f.read()

     if not diffoci_hash_matches(diffoci_bin):
@ -153,7 +187,6 @@ def main():
     git_verify(commit, args.source)

     if not diffoci_is_installed():
-        logger.info(f"Downloading diffoci helper from {DIFFOCI_URL}")
         diffoci_download()

     tag = f"reproduce-{commit}"

View file

@ -0,0 +1,83 @@
# Independent Container Updates
Since version 0.9.0, Dangerzone is able to ship container images independently
from releases of the software.
One of the main benefits of doing so is to shorten the time needed to distribute security fixes for the containers. Since the container is where the actual conversion of documents happens, this is a way to keep Dangerzone users secure.
If you are a Dangerzone user, this all happens behind the scenes, and you do not need to know anything about it to enjoy these "in-app" updates. If you are using Dangerzone in an air-gapped environment, check the sections below.
## Checking attestations
Each night, new images are built and pushed to the container registry, along
with a provenance attestation, enabling anybody to verify that the image was
originally built by GitHub CI runners, from a defined source repository (in our case `freedomofpress/dangerzone`).
To verify the attestations against our expectations, use the following command:
```bash
dangerzone-image attest-provenance ghcr.io/freedomofpress/dangerzone/dangerzone --repository freedomofpress/dangerzone
```
In case of success, it will report back:
```
🎉 Successfully verified image
'ghcr.io/freedomofpress/dangerzone/dangerzone:<tag>@sha256:<digest>'
and its associated claims:
- ✅ SLSA Level 3 provenance
- ✅ GitHub repo: freedomofpress/dangerzone
- ✅ GitHub actions workflow: <workflow>
- ✅ Git branch: <branch>
- ✅ Git commit: <commit>
```
## Sign and publish the remote image
Once the image has been reproduced locally, we can add a signature to the container registry,
and update the `latest` tag to point to the proper hash.
```bash
cosign sign --sk ghcr.io/freedomofpress/dangerzone/dangerzone:${TAG}@sha256:${DIGEST}
# And update the latest tag
crane auth login ghcr.io -u USERNAME --password $(cat pat_token)
crane tag ghcr.io/freedomofpress/dangerzone/dangerzone@sha256:${DIGEST} latest
```
## Install updates
To check if a new container image has been released, and to update your local installation with it, you can use the following command:
```bash
dangerzone-image upgrade ghcr.io/freedomofpress/dangerzone/dangerzone
```
## Verify locally
You can verify that the image you have locally matches the stored signatures, and that these have been signed with a trusted public key:
```bash
dangerzone-image verify-local ghcr.io/freedomofpress/dangerzone/dangerzone
```
## Installing image updates to air-gapped environments
Three steps are required:
1. Prepare the archive
2. Transfer the archive to the air-gapped system
3. Install the archive on the air-gapped system
This archive will contain all the needed material to validate that the new container image has been signed and is valid.
On the machine on which you prepare the packages:
```bash
dangerzone-image prepare-archive --output dz-fa94872.tar ghcr.io/freedomofpress/dangerzone/dangerzone@sha256:<digest>
```
On the airgapped machine, copy the file and run the following command:
```bash
dangerzone-image load-archive dz-fa94872.tar
```

View file

@ -27,6 +27,7 @@ packaging = "*"
[tool.poetry.scripts]
dangerzone = 'dangerzone:main'
dangerzone-cli = 'dangerzone:main'
dangerzone-image = "dangerzone.updater.cli:main"
# Dependencies required for packaging the code on various platforms.
[tool.poetry.group.package.dependencies]

View file

@ -0,0 +1,7 @@
This folder contains signature folders used for testing the signatures implementation.
The following folders are used:
- `valid`: this folder contains signatures which should be considered valid and were generated with the key available at `tests/assets/test.pub.key`
- `invalid`: this folder contains signatures which should be considered invalid, because their format doesn't match the expected one, e.g. they use plain text instead of base64-encoded text.
- `tempered`: this folder contains signatures which have been tampered with. The goal is to have signatures that look valid, but actually aren't.
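
A test exercising these fixtures could look like the following sketch; the digest value and the exact exception subclass asserted here are illustrative, not taken from the actual test suite:

```python
# Sketch of a fixture-driven test; "<image-digest>" is a placeholder.
from pathlib import Path

import pytest

from dangerzone.updater import errors, signatures

ASSETS = Path(__file__).parent / "assets" / "signatures"


def test_invalid_signatures_are_rejected() -> None:
    with pytest.raises(errors.SignatureError):
        signatures.load_and_verify_signatures(
            image_digest="<image-digest>",
            pubkey="tests/assets/test.pub.key",
            signatures_path=ASSETS / "invalid",
        )
```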

View file

@ -0,0 +1,18 @@
[
{
"Base64Signature": "Invalid base64 signature",
"Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1NjoxOWU4ZWFjZDc1ODc5ZDA1ZjY2MjFjMmVhOGRkOTU1ZTY4ZWUzZTA3YjQxYjlkNTNmNGM4Y2M5OTI5YTY4YTY3In0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=",
"Cert": null,
"Chain": null,
"Bundle": {
"SignedEntryTimestamp": "MEUCIC9oXH9VVP96frVOmDw704FBqMN/Bpm2RMdTm6BtSwL/AiEA6mCIjhV65fYuy4CwjsIzQHi/oW6IBwtd6oCvN2dI6HQ=",
"Payload": {
"body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiJmMjEwNDJjY2RjOGU0ZjA1ZGEzNmE5ZjU4ODg5MmFlZGRlMzYzZTQ2ZWNjZGZjM2MyNzAyMTkwZDU0YTdmZmVlIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FWUNJUUNWaTJGUFI3Mjl1aHAvY3JFdUNTOW9yQzRhMnV0OHN3dDdTUnZXYUVSTGp3SWhBSlM1dzU3MHhsQnJsM2Nhd1Y1akQ1dk85RGh1dkNrdCtzOXJLdGc2NzVKQSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=",
"integratedTime": 1738752154,
"logIndex": 168898587,
"logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"
}
},
"RFC3161Timestamp": null
}
]

View file

@ -0,0 +1,18 @@
[
{
"Base64Signature": "MEQCICi2AOAJbS1k3334VMSo+qxaI4f5VoNnuVExZ4tfIu7rAiAiwuKdo8rGfFMGMLSFSQvoLF3JuwFy4JtNW6kQlwH7vg==",
"Payload": "Invalid base64 payload",
"Cert": null,
"Chain": null,
"Bundle": {
"SignedEntryTimestamp": "MEUCIEvx6NtFeAag9TplqMLjVczT/tC6lpKe9SnrxbehBlxfAiEA07BE3f5JsMLsUsmHD58D6GaZr2yz+yQ66Os2ps8oKz8=",
"Payload": {
"body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiI4YmJmNGRiNjBmMmExM2IyNjI2NTI3MzljNWM5ZTYwNjNiMDYyNjVlODU1Zjc3MTdjMTdlYWY4YzViZTQyYWUyIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FUUNJQ2kyQU9BSmJTMWszMzM0Vk1TbytxeGFJNGY1Vm9ObnVWRXhaNHRmSXU3ckFpQWl3dUtkbzhyR2ZGTUdNTFNGU1F2b0xGM0p1d0Z5NEp0Tlc2a1Fsd0g3dmc9PSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=",
"integratedTime": 1738859497,
"logIndex": 169356501,
"logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"
}
},
"RFC3161Timestamp": null
}
]

View file

@ -0,0 +1,18 @@
[
{
"Base64Signature": "MEQCIDJxvB7lBU+VNYBD0xw/3Bi8wY7GPJ2fBP7mUFbguApoAiAIpuQT+sgatOY6yXkkA8K/sM40d5/gt7jQywWPbq5+iw==",
"Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hcHlyZ2lvL2RhbmdlcnpvbmUvZGFuZ2Vyem9uZSJ9LCJpbWFnZSI6eyJkb2NrZXItbWFuaWZlc3QtZGlnZXN0Ijoic2hhMjU2OjRkYTQ0MTIzNWU4NGU5MzUxODc3ODgyN2E1YzU3NDVkNTMyZDdhNDA3OTg4NmUxNjQ3OTI0YmVlN2VmMWMxNGQifSwidHlwZSI6ImNvc2lnbiBjb250YWluZXIgaW1hZ2Ugc2lnbmF0dXJlIn0sIm9wdGlvbmFsIjpudWxsfQ==",
"Cert": null,
"Chain": null,
"Bundle": {
"SignedEntryTimestamp": "Invalid signed entry timestamp",
"Payload": {
"body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiIyMGE2ZDU1NTk4Y2U0NjU3NWZkZjViZGU3YzhhYWE2YTU2ZjZlMGRmOWNiYTY1MTJhMDAxODhjMTU1NGIzYjE3In19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FUUNJREp4dkI3bEJVK1ZOWUJEMHh3LzNCaTh3WTdHUEoyZkJQN21VRmJndUFwb0FpQUlwdVFUK3NnYXRPWTZ5WGtrQThLL3NNNDBkNS9ndDdqUXl3V1BicTUraXc9PSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=",
"integratedTime": 1738688492,
"logIndex": 168652066,
"logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"
}
},
"RFC3161Timestamp": null
}
]

View file

@ -0,0 +1,18 @@
[
{
"Base64Signature": "MEUCIQC2WlJH+B8VuX1c6i4sDwEGEZc53hXUD6/ds9TMJ3HrfwIgCxSnrNYRD2c8XENqfqc+Ik1gx0DK9kPNsn/Lt8V/dCo=",
"Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1Njo3YjIxZGJkZWJmZmVkODU1NjIxZGZjZGVhYTUyMjMwZGM2NTY2OTk3Zjg1MmVmNWQ2MmIwMzM4YjQ2Nzk2ZTAxIn0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=",
"Cert": null,
"Chain": null,
"Bundle": {
"SignedEntryTimestamp": "MEYCIQDn04gOHqiZcwUO+NVV9+29+abu6O/k1ve9zatJ3gVu9QIhAJL3E+mqVPdMPfMSdhHt2XDQsYzfRDDJNJEABQlbV3Jg",
"Payload": {
"body": "Invalid bundle payload body",
"integratedTime": 1738862352,
"logIndex": 169369149,
"logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"
}
},
"RFC3161Timestamp": null
}
]

View file

@ -0,0 +1,18 @@
[
{
"Base64Signature": "MAIhAJWLYU9Hvb26Gn9ysS4JL2isLhra63yzC3tJG9ZoREuPAiEAlLnDnvTGUGuXdxrBXmMPm870OG68KS36z2sq2DrvkkAK",
"Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1NjoxOWU4ZWFjZDc1ODc5ZDA1ZjY2MjFjMmVhOGRkOTU1ZTY4ZWUzZTA3YjQxYjlkNTNmNGM4Y2M5OTI5YTY4YTY3In0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=",
"Cert": null,
"Chain": null,
"Bundle": {
"SignedEntryTimestamp": "MEUCIC9oXH9VVP96frVOmDw704FBqMN/Bpm2RMdTm6BtSwL/AiEA6mCIjhV65fYuy4CwjsIzQHi/oW6IBwtd6oCvN2dI6HQ=",
"Payload": {
"body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiJmMjEwNDJjY2RjOGU0ZjA1ZGEzNmE5ZjU4ODg5MmFlZGRlMzYzZTQ2ZWNjZGZjM2MyNzAyMTkwZDU0YTdmZmVlIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FWUNJUUNWaTJGUFI3Mjl1aHAvY3JFdUNTOW9yQzRhMnV0OHN3dDdTUnZXYUVSTGp3SWhBSlM1dzU3MHhsQnJsM2Nhd1Y1akQ1dk85RGh1dkNrdCtzOXJLdGc2NzVKQSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=",
"integratedTime": 1738752154,
"logIndex": 168898587,
"logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"
}
},
"RFC3161Timestamp": null
}
]

View file

@ -0,0 +1,18 @@
[
{
"Base64Signature": "MEQCICi2AOAJbS1k3334VMSo+qxaI4f5VoNnuVExZ4tfIu7rAiAiwuKdo8rGfFMGMLSFSQvoLF3JuwFy4JtNW6kQlwH7vg==",
"Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9oNHh4MHIvZGFuZ2Vyem9uZS9kYW5nZXJ6b25lIn0sImltYWdlIjp7ImRvY2tlci1tYW5pZmVzdC1kaWdlc3QiOiJzaGEyNTY6MjIwYjUyMjAwZTNlNDdiMWI0MjAxMDY2N2ZjYWE5MzM4NjgxZTY0ZGQzZTM0YTM0ODczODY2Y2IwNTFkNjk0ZSJ9LCJ0eXBlIjoiY29zaWduIGNvbnRhaW5lciBpbWFnZSBzaWduYXR1cmUifSwib3B0aW9uYWwiOm51bGx9Cg==",
"Cert": null,
"Chain": null,
"Bundle": {
"SignedEntryTimestamp": "MEUCIEvx6NtFeAag9TplqMLjVczT/tC6lpKe9SnrxbehBlxfAiEA07BE3f5JsMLsUsmHD58D6GaZr2yz+yQ66Os2ps8oKz8=",
"Payload": {
"body": "eyJhcGlWZXJzaW9uIjoiNi42LjYiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiI4YmJmNGRiNjBmMmExM2IyNjI2NTI3MzljNWM5ZTYwNjNiMDYyNjVlODU1Zjc3MTdjMTdlYWY4YzViZTQyYWUyIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FUUNJQ2kyQU9BSmJTMWszMzM0Vk1TbytxeGFJNGY1Vm9ObnVWRXhaNHRmSXU3ckFpQWl3dUtkbzhyR2ZGTUdNTFNGU1F2b0xGM0p1d0Z5NEp0Tlc2a1Fsd0g3dmc9PSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0K",
"integratedTime": 1738859497,
"logIndex": 169356501,
"logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"
}
},
"RFC3161Timestamp": null
}
]
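Each "body" above is a base64-encoded Rekor hashedrekord entry; decoding it makes the tampering visible (in the fixture above, apiVersion reads "6.6.6" instead of the expected "0.0.1"). A small inspection snippet, assuming a fixture path is passed as an argument:

import base64
import json
import sys

# Decode the Rekor bundle body of every signature in a fixture file.
with open(sys.argv[1]) as f:
    signatures = json.load(f)

for sig in signatures:
    entry = json.loads(base64.b64decode(sig["Bundle"]["Payload"]["body"]))
    # A genuine hashedrekord entry reports apiVersion "0.0.1"; the fixture
    # above was tampered to report "6.6.6" instead.
    print(entry["apiVersion"], entry["spec"]["data"]["hash"]["value"])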

View file

@ -0,0 +1 @@
This folder contains signatures that have been tampered with. The goal is to have signatures that look valid, but actually aren't.
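A tampered fixture of this kind can be produced by decoding a valid signature, mutating a single field, and re-encoding it. A minimal sketch (the file paths and the mutated field are illustrative, not the exact ones used here):

import base64
import json
from pathlib import Path

# Start from a known-good signature fixture (hypothetical path).
signatures = json.loads(Path("valid/example.json").read_text())

# Decode the Rekor bundle body, flip one field, and re-encode it. Any
# mutation breaks the signed entry while the JSON still looks well-formed.
payload = signatures[0]["Bundle"]["Payload"]
body = json.loads(base64.b64decode(payload["body"]))
body["apiVersion"] = "6.6.6"
payload["body"] = base64.b64encode(json.dumps(body).encode()).decode()

Path("tempered/example.json").write_text(json.dumps(signatures))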

View file

@ -0,0 +1 @@
[{"Base64Signature": "MEYCIQCVi2FPR729uhp/crEuCS9orC4a2ut8swt7SRvWaERLjwIhAJS5w570xlBrl3cawV5jD5vO9DhuvCkt+s9rKtg675JA", "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1NjoxOWU4ZWFjZDc1ODc5ZDA1ZjY2MjFjMmVhOGRkOTU1ZTY4ZWUzZTA3YjQxYjlkNTNmNGM4Y2M5OTI5YTY4YTY3In0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=", "Cert": null, "Chain": null, "Bundle": {"SignedEntryTimestamp": "MEUCIC9oXH9VVP96frVOmDw704FBqMN/Bpm2RMdTm6BtSwL/AiEA6mCIjhV65fYuy4CwjsIzQHi/oW6IBwtd6oCvN2dI6HQ=", "Payload": {"body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiJmMjEwNDJjY2RjOGU0ZjA1ZGEzNmE5ZjU4ODg5MmFlZGRlMzYzZTQ2ZWNjZGZjM2MyNzAyMTkwZDU0YTdmZmVlIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FWUNJUUNWaTJGUFI3Mjl1aHAvY3JFdUNTOW9yQzRhMnV0OHN3dDdTUnZXYUVSTGp3SWhBSlM1dzU3MHhsQnJsM2Nhd1Y1akQ1dk85RGh1dkNrdCtzOXJLdGc2NzVKQSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", "integratedTime": 1738752154, "logIndex": 168898587, "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"}}, "RFC3161Timestamp": null}]

View file

@ -0,0 +1 @@
[{"Base64Signature": "MEQCICi2AOAJbS1k3334VMSo+qxaI4f5VoNnuVExZ4tfIu7rAiAiwuKdo8rGfFMGMLSFSQvoLF3JuwFy4JtNW6kQlwH7vg==", "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1NjoyMjBiNTIyMDBlM2U0N2IxYjQyMDEwNjY3ZmNhYTkzMzg2ODFlNjRkZDNlMzRhMzQ4NzM4NjZjYjA1MWQ2OTRlIn0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=", "Cert": null, "Chain": null, "Bundle": {"SignedEntryTimestamp": "MEUCIEvx6NtFeAag9TplqMLjVczT/tC6lpKe9SnrxbehBlxfAiEA07BE3f5JsMLsUsmHD58D6GaZr2yz+yQ66Os2ps8oKz8=", "Payload": {"body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiI4YmJmNGRiNjBmMmExM2IyNjI2NTI3MzljNWM5ZTYwNjNiMDYyNjVlODU1Zjc3MTdjMTdlYWY4YzViZTQyYWUyIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FUUNJQ2kyQU9BSmJTMWszMzM0Vk1TbytxeGFJNGY1Vm9ObnVWRXhaNHRmSXU3ckFpQWl3dUtkbzhyR2ZGTUdNTFNGU1F2b0xGM0p1d0Z5NEp0Tlc2a1Fsd0g3dmc9PSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", "integratedTime": 1738859497, "logIndex": 169356501, "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"}}, "RFC3161Timestamp": null}]

View file

@ -0,0 +1 @@
[{"Base64Signature": "MEQCIDJxvB7lBU+VNYBD0xw/3Bi8wY7GPJ2fBP7mUFbguApoAiAIpuQT+sgatOY6yXkkA8K/sM40d5/gt7jQywWPbq5+iw==", "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hcHlyZ2lvL2RhbmdlcnpvbmUvZGFuZ2Vyem9uZSJ9LCJpbWFnZSI6eyJkb2NrZXItbWFuaWZlc3QtZGlnZXN0Ijoic2hhMjU2OjRkYTQ0MTIzNWU4NGU5MzUxODc3ODgyN2E1YzU3NDVkNTMyZDdhNDA3OTg4NmUxNjQ3OTI0YmVlN2VmMWMxNGQifSwidHlwZSI6ImNvc2lnbiBjb250YWluZXIgaW1hZ2Ugc2lnbmF0dXJlIn0sIm9wdGlvbmFsIjpudWxsfQ==", "Cert": null, "Chain": null, "Bundle": {"SignedEntryTimestamp": "MEYCIQDuuuHoyZ2i4HKxik4Ju/MWkELwc1w5SfzcpCV7G+vZHAIhAO25R/+lIfQ/kMfC4PfeoWDwLpvnH9cq6dVSzl12i1su", "Payload": {"body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiIyMGE2ZDU1NTk4Y2U0NjU3NWZkZjViZGU3YzhhYWE2YTU2ZjZlMGRmOWNiYTY1MTJhMDAxODhjMTU1NGIzYjE3In19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FUUNJREp4dkI3bEJVK1ZOWUJEMHh3LzNCaTh3WTdHUEoyZkJQN21VRmJndUFwb0FpQUlwdVFUK3NnYXRPWTZ5WGtrQThLL3NNNDBkNS9ndDdqUXl3V1BicTUraXc9PSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", "integratedTime": 1738688492, "logIndex": 168652066, "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"}}, "RFC3161Timestamp": null}]

View file

@ -0,0 +1 @@
[{"Base64Signature": "MEUCIQC2WlJH+B8VuX1c6i4sDwEGEZc53hXUD6/ds9TMJ3HrfwIgCxSnrNYRD2c8XENqfqc+Ik1gx0DK9kPNsn/Lt8V/dCo=", "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1Njo3YjIxZGJkZWJmZmVkODU1NjIxZGZjZGVhYTUyMjMwZGM2NTY2OTk3Zjg1MmVmNWQ2MmIwMzM4YjQ2Nzk2ZTAxIn0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=", "Cert": null, "Chain": null, "Bundle": {"SignedEntryTimestamp": "MEYCIQDn04gOHqiZcwUO+NVV9+29+abu6O/k1ve9zatJ3gVu9QIhAJL3E+mqVPdMPfMSdhHt2XDQsYzfRDDJNJEABQlbV3Jg", "Payload": {"body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiIzZWQwNWJlYTc2ZWFmMzBmYWM1NzBlNzhlODBlZmQxNDNiZWQxNzFjM2VjMDY5MWI2MDU3YjdhMDAzNGEyMzhlIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FVUNJUUMyV2xKSCtCOFZ1WDFjNmk0c0R3RUdFWmM1M2hYVUQ2L2RzOVRNSjNIcmZ3SWdDeFNuck5ZUkQyYzhYRU5xZnFjK0lrMWd4MERLOWtQTnNuL0x0OFYvZENvPSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", "integratedTime": 1738862352, "logIndex": 169369149, "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"}}, "RFC3161Timestamp": null}]

View file

@ -0,0 +1 @@
[{"Base64Signature": "MEQCIHqXEMuAmt1pFCsHC71+ejlG5kjKrf1+AQW202OY3vhsAiA0BoDAVgAk9K7SgIRBpIV6u0veyB1iypzV0DteNh3IoQ==", "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1NjpmYTk0ODcyNmFhYzI5YTZhYzQ5ZjAxZWM4ZmJiYWMxODUyMmIzNWIyNDkxZmRmNzE2MjM2YTBiMzUwMmEyY2E3In0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=", "Cert": null, "Chain": null, "Bundle": {"SignedEntryTimestamp": "MEUCIQCrZ+2SSYdpIOEbyUXXaBxeqT8RTujpqdXipls9hmNvDgIgdWV84PiCY2cI49QjHjun7lj25/znGMDiwjCuPjIPA6Q=", "Payload": {"body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiI5ZjcwM2I4NTM4MjM4N2U2OTgwNzYxNDg1YzU0NGIzNmJmMThmNTA5ODQwMTMxYzRmOTJhMjE4OTI3MTJmNDJmIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FUUNJSHFYRU11QW10MXBGQ3NIQzcxK2VqbEc1a2pLcmYxK0FRVzIwMk9ZM3Zoc0FpQTBCb0RBVmdBazlLN1NnSVJCcElWNnUwdmV5QjFpeXB6VjBEdGVOaDNJb1E9PSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", "integratedTime": 1737478056, "logIndex": 164177381, "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"}}, "RFC3161Timestamp": null}, {"Base64Signature": "MEYCIQDg8MeymBLOn+Khue0yK1yQy4Fu/+GXmyC/xezXO/p1JgIhAN6QLojKzkZGxyYirbqRbZCVcIM4YN3Y18FXwpW4RuUy", "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1NjpmYTk0ODcyNmFhYzI5YTZhYzQ5ZjAxZWM4ZmJiYWMxODUyMmIzNWIyNDkxZmRmNzE2MjM2YTBiMzUwMmEyY2E3In0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=", "Cert": null, "Chain": null, "Bundle": {"SignedEntryTimestamp": "MEUCIQCQLlrH2xo/bA6r386vOwA0OjUe0TqcxROT/Wo220jvGgIgPgRlKnQxWoXlD/Owf1Ogk5XlfXAt2f416LDbk4AoEvk=", "Payload": {"body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiI5ZjcwM2I4NTM4MjM4N2U2OTgwNzYxNDg1YzU0NGIzNmJmMThmNTA5ODQwMTMxYzRmOTJhMjE4OTI3MTJmNDJmIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FWUNJUURnOE1leW1CTE9uK0todWUweUsxeVF5NEZ1LytHWG15Qy94ZXpYTy9wMUpnSWhBTjZRTG9qS3prWkd4eVlpcmJxUmJaQ1ZjSU00WU4zWTE4Rlh3cFc0UnVVeSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", "integratedTime": 1737557525, "logIndex": 164445483, "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"}}, "RFC3161Timestamp": null}, {"Base64Signature": "MEQCIEhUVYVW6EdovGDSSZt1Ffc86OfzEKAas94M4eFK7hoFAiA4+6219LktmgJSKuc2ObsnL5QjHyNLk58BwY0s8gBHbQ==", "Payload": 
"eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1NjpmYTk0ODcyNmFhYzI5YTZhYzQ5ZjAxZWM4ZmJiYWMxODUyMmIzNWIyNDkxZmRmNzE2MjM2YTBiMzUwMmEyY2E3In0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=", "Cert": null, "Chain": null, "Bundle": {"SignedEntryTimestamp": "MEQCIDRUTMwL+/eW79ARRLE8h/ByCrvo0rOn3vUYQg1E6KIBAiBi/bzoqcL2Ik27KpwfFosww4l7yI+9IqwCvUlkQgEB7g==", "Payload": {"body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiI5ZjcwM2I4NTM4MjM4N2U2OTgwNzYxNDg1YzU0NGIzNmJmMThmNTA5ODQwMTMxYzRmOTJhMjE4OTI3MTJmNDJmIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FUUNJRWhVVllWVzZFZG92R0RTU1p0MUZmYzg2T2Z6RUtBYXM5NE00ZUZLN2hvRkFpQTQrNjIxOUxrdG1nSlNLdWMyT2Jzbkw1UWpIeU5MazU4QndZMHM4Z0JIYlE9PSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", "integratedTime": 1737567664, "logIndex": 164484602, "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"}}, "RFC3161Timestamp": null}]

View file

@ -0,0 +1,4 @@
-----BEGIN PUBLIC KEY-----
MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEoE0CXLGff79fR8KyPnSvOY74UBkt
2sLi+aVFUzS1Qwt4wosxHhcDN2B6QSsLlvgsH82q6qcA6PL2SdS/p4jWGA==
-----END PUBLIC KEY-----

View file

@ -13,6 +13,13 @@ from dangerzone.gui import Application
sys.dangerzone_dev = True # type: ignore[attr-defined]
ASSETS_PATH = Path(__file__).parent / "assets"
TEST_PUBKEY_PATH = ASSETS_PATH / "test.pub.key"
INVALID_SIGNATURES_PATH = ASSETS_PATH / "signatures" / "invalid"
VALID_SIGNATURES_PATH = ASSETS_PATH / "signatures" / "valid"
TEMPERED_SIGNATURES_PATH = ASSETS_PATH / "signatures" / "tempered"
# Use this fixture to make `pytest-qt` invoke our custom QApplication.
# See https://pytest-qt.readthedocs.io/en/latest/qapplication.html#testing-custom-qapplications
@pytest.fixture(scope="session")
@ -133,6 +140,11 @@ for_each_doc = pytest.mark.parametrize(
)
@pytest.fixture
def signature():
return {}
# External Docs - base64 docs encoded for externally sourced documents
# XXX to reduce the chance of accidentally opening them
test_docs_external_dir = Path(__file__).parent.joinpath(SAMPLE_EXTERNAL_DIRECTORY)

238
tests/test_registry.py Normal file
View file

@ -0,0 +1,238 @@
import hashlib
import pytest
import requests
from pytest_mock import MockerFixture
from dangerzone.updater.registry import (
Image,
_get_auth_header,
_url,
get_manifest,
get_manifest_digest,
list_tags,
parse_image_location,
)
def test_parse_image_location_no_tag():
"""Test that parse_image_location correctly handles an image location without a tag."""
image_str = "ghcr.io/freedomofpress/dangerzone"
image = parse_image_location(image_str)
assert isinstance(image, Image)
assert image.registry == "ghcr.io"
assert image.namespace == "freedomofpress"
assert image.image_name == "dangerzone"
assert image.tag == "latest" # Default tag should be "latest"
assert image.digest is None
def test_parse_image_location_with_tag():
"""Test that parse_image_location correctly handles an image location with a tag."""
image_str = "ghcr.io/freedomofpress/dangerzone:v0.4.2"
image = parse_image_location(image_str)
assert isinstance(image, Image)
assert image.registry == "ghcr.io"
assert image.namespace == "freedomofpress"
assert image.image_name == "dangerzone"
assert image.tag == "v0.4.2"
def test_parse_image_location_tag_plus_digest():
"""Test that parse_image_location handles an image location with a tag that includes a digest."""
image_str = (
"ghcr.io/freedomofpress/dangerzone"
":20250205-0.8.0-148-ge67fbc1"
"@sha256:19e8eacd75879d05f6621c2ea8dd955e68ee3e07b41b9d53f4c8cc9929a68a67"
)
image = parse_image_location(image_str)
assert isinstance(image, Image)
assert image.registry == "ghcr.io"
assert image.namespace == "freedomofpress"
assert image.image_name == "dangerzone"
assert image.tag == "20250205-0.8.0-148-ge67fbc1"
assert (
image.digest
== "sha256:19e8eacd75879d05f6621c2ea8dd955e68ee3e07b41b9d53f4c8cc9929a68a67"
)
def test_parse_invalid_image_location():
"""Test that parse_image_location raises an error for invalid image locations."""
invalid_image_locations = [
"ghcr.io/dangerzone", # Missing namespace
"ghcr.io/freedomofpress/dangerzone:", # Empty tag
"freedomofpress/dangerzone", # Missing registry
"ghcr.io:freedomofpress/dangerzone", # Invalid format
"", # Empty string
]
for invalid_image in invalid_image_locations:
with pytest.raises(ValueError, match="Malformed image location"):
parse_image_location(invalid_image)
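A parser satisfying these cases can be a single regular expression over registry/namespace/name[:tag][@digest]. A minimal sketch, not the project's actual implementation:

import re
from collections import namedtuple

Image = namedtuple("Image", ["registry", "namespace", "image_name", "tag", "digest"])

_PATTERN = re.compile(
    r"^(?P<registry>[a-zA-Z0-9.-]+)"
    r"/(?P<namespace>[a-zA-Z0-9_./-]+)"
    r"/(?P<image_name>[a-zA-Z0-9_.-]+)"
    r"(?::(?P<tag>[a-zA-Z0-9._-]+))?"
    r"(?:@(?P<digest>sha256:[0-9a-f]{64}))?$"
)

def parse_image_location_sketch(image_str: str) -> Image:
    match = _PATTERN.match(image_str)
    if not match:
        raise ValueError(f"Malformed image location: {image_str}")
    return Image(
        registry=match.group("registry"),
        namespace=match.group("namespace"),
        image_name=match.group("image_name"),
        tag=match.group("tag") or "latest",  # default when no tag is given
        digest=match.group("digest"),
    )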
def test_list_tags(mocker: MockerFixture):
"""Test that list_tags correctly retrieves tags from the registry."""
# Mock the authentication response
image_str = "ghcr.io/freedomofpress/dangerzone"
# Mock requests.get to return appropriate values for both calls
mock_response_auth = mocker.Mock()
mock_response_auth.json.return_value = {"token": "dummy_token"}
mock_response_auth.raise_for_status.return_value = None
mock_response_tags = mocker.Mock()
mock_response_tags.json.return_value = {
"tags": ["v0.4.0", "v0.4.1", "v0.4.2", "latest"]
}
mock_response_tags.raise_for_status.return_value = None
# Setup the mock to return different responses for each URL
def mock_get(url, **kwargs):
if "token" in url:
return mock_response_auth
else:
return mock_response_tags
mocker.patch("requests.get", side_effect=mock_get)
# Call the function
tags = list_tags(image_str)
# Verify the result
assert tags == ["v0.4.0", "v0.4.1", "v0.4.2", "latest"]
def test_list_tags_auth_error(mocker: MockerFixture):
"""Test that list_tags handles authentication errors correctly."""
image_str = "ghcr.io/freedomofpress/dangerzone"
# Mock requests.get to raise an HTTPError
mock_response = mocker.Mock()
mock_response.raise_for_status.side_effect = requests.exceptions.HTTPError(
"401 Client Error: Unauthorized"
)
mocker.patch("requests.get", return_value=mock_response)
# Call the function and expect an error
with pytest.raises(requests.exceptions.HTTPError):
list_tags(image_str)
def test_list_tags_registry_error(mocker: MockerFixture):
"""Test that list_tags handles registry errors correctly."""
image_str = "ghcr.io/freedomofpress/dangerzone"
# Mock requests.get to return success for auth but error for tags
mock_response_auth = mocker.Mock()
mock_response_auth.json.return_value = {"token": "dummy_token"}
mock_response_auth.raise_for_status.return_value = None
mock_response_tags = mocker.Mock()
mock_response_tags.raise_for_status.side_effect = requests.exceptions.HTTPError(
"404 Client Error: Not Found"
)
# Setup the mock to return different responses for each URL
def mock_get(url, **kwargs):
if "token" in url:
return mock_response_auth
else:
return mock_response_tags
mocker.patch("requests.get", side_effect=mock_get)
# Call the function and expect an error
with pytest.raises(requests.exceptions.HTTPError):
list_tags(image_str)
def test_get_manifest(mocker: MockerFixture):
"""Test that get_manifest correctly retrieves manifests from the registry."""
image_str = "ghcr.io/freedomofpress/dangerzone:v0.4.2"
# Mock the responses
manifest_content = {
"schemaVersion": 2,
"mediaType": "application/vnd.docker.distribution.manifest.v2+json",
"config": {
"mediaType": "application/vnd.docker.container.image.v1+json",
"size": 1234,
"digest": "sha256:abc123def456",
},
"layers": [
{
"mediaType": "application/vnd.docker.image.rootfs.diff.tar.gzip",
"size": 12345,
"digest": "sha256:layer1",
}
],
}
mock_response_auth = mocker.Mock()
mock_response_auth.json.return_value = {"token": "dummy_token"}
mock_response_auth.raise_for_status.return_value = None
mock_response_manifest = mocker.Mock()
mock_response_manifest.json.return_value = manifest_content
mock_response_manifest.status_code = 200
mock_response_manifest.raise_for_status.return_value = None
# Setup the mock to return different responses for each URL
def mock_get(url, **kwargs):
if "token" in url:
return mock_response_auth
else:
return mock_response_manifest
mocker.patch("requests.get", side_effect=mock_get)
# Call the function
response = get_manifest(image_str)
# Verify the result
assert response.status_code == 200
assert response.json() == manifest_content
def test_get_manifest_digest():
"""Test that get_manifest_digest correctly calculates the manifest digest."""
# Create a sample manifest content
manifest_content = b'{"schemaVersion":2,"mediaType":"application/vnd.docker.distribution.manifest.v2+json"}'
# Calculate the expected digest manually (hashlib is already imported
# at the top of the module)
expected_digest = hashlib.sha256(manifest_content).hexdigest()
# Call the function with the content directly
digest = get_manifest_digest("unused_image_str", manifest_content)
# Verify the result
assert digest == expected_digest
def test_get_manifest_digest_from_registry(mocker: MockerFixture):
"""Test that get_manifest_digest correctly retrieves and calculates digests from the registry."""
image_str = "ghcr.io/freedomofpress/dangerzone:v0.4.2"
# Sample manifest content
manifest_content = b'{"schemaVersion":2,"mediaType":"application/vnd.docker.distribution.manifest.v2+json"}'
expected_digest = hashlib.sha256(manifest_content).hexdigest()
# Mock get_manifest
mock_response = mocker.Mock()
mock_response.content = manifest_content
mocker.patch("dangerzone.updater.registry.get_manifest", return_value=mock_response)
# Call the function
digest = get_manifest_digest(image_str)
# Verify the result
assert digest == expected_digest
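Both tests pin down the same behavior: the digest is simply the SHA-256 hex of the raw manifest bytes, fetched from the registry first when no content is supplied. A sketch consistent with them, reusing the get_manifest helper imported above:

import hashlib
from typing import Optional

from dangerzone.updater.registry import get_manifest

def get_manifest_digest_sketch(image_str: str, content: Optional[bytes] = None) -> str:
    if content is None:
        # No content supplied: fetch the manifest from the registry first.
        content = get_manifest(image_str).content
    return hashlib.sha256(content).hexdigest()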

324
tests/test_signatures.py Normal file
View file

@ -0,0 +1,324 @@
import json
import unittest
from pathlib import Path
import pytest
from pytest_subprocess import FakeProcess
from dangerzone import errors as dzerrors
from dangerzone.updater import errors
from dangerzone.updater.signatures import (
Signature,
get_config_dir,
get_last_log_index,
get_log_index_from_signatures,
get_remote_signatures,
is_update_available,
load_and_verify_signatures,
prepare_airgapped_archive,
store_signatures,
upgrade_container_image,
verify_local_image,
verify_signature,
verify_signatures,
)
ASSETS_PATH = Path(__file__).parent / "assets"
TEST_PUBKEY_PATH = ASSETS_PATH / "test.pub.key"
INVALID_SIGNATURES_PATH = ASSETS_PATH / "signatures" / "invalid"
VALID_SIGNATURES_PATH = ASSETS_PATH / "signatures" / "valid"
TEMPERED_SIGNATURES_PATH = ASSETS_PATH / "signatures" / "tempered"
RANDOM_DIGEST = "aacc9b586648bbe3040f2822153b1d5ead2779af45ff750fd6f04daf4a9f64b4"
@pytest.fixture
def valid_signature():
signature_file = next(VALID_SIGNATURES_PATH.glob("**/*.json"))
with open(signature_file, "r") as f:
signatures = json.load(f)
return signatures.pop()
@pytest.fixture
def tempered_signature():
signature_file = next(TEMPERED_SIGNATURES_PATH.glob("**/*.json"))
with open(signature_file, "r") as f:
signatures = json.load(f)
return signatures.pop()
@pytest.fixture
def signature_other_digest(valid_signature):
signature = valid_signature.copy()
signature["Bundle"]["Payload"]["digest"] = "sha256:123456"
return signature
def test_load_valid_signatures(mocker):
mocker.patch("dangerzone.updater.signatures.SIGNATURES_PATH", VALID_SIGNATURES_PATH)
valid_signatures = list(VALID_SIGNATURES_PATH.glob("**/*.json"))
assert len(valid_signatures) > 0
for file in valid_signatures:
signatures = load_and_verify_signatures(file.stem, TEST_PUBKEY_PATH)
assert isinstance(signatures, list)
assert len(signatures) > 0
def test_load_invalid_signatures(mocker):
mocker.patch(
"dangerzone.updater.signatures.SIGNATURES_PATH", INVALID_SIGNATURES_PATH
)
invalid_signatures = list(INVALID_SIGNATURES_PATH.glob("**/*.json"))
assert len(invalid_signatures) > 0
for file in invalid_signatures:
with pytest.raises(errors.SignatureError):
load_and_verify_signatures(file.stem, TEST_PUBKEY_PATH)
def test_load_tempered_signatures(mocker):
mocker.patch(
"dangerzone.updater.signatures.SIGNATURES_PATH", TEMPERED_SIGNATURES_PATH
)
tempered_signatures = list(TEMPERED_SIGNATURES_PATH.glob("**/*.json"))
assert len(tempered_signatures) > 0
for file in tempered_signatures:
with pytest.raises(errors.SignatureError):
load_and_verify_signatures(file.stem, TEST_PUBKEY_PATH)
def test_get_log_index_from_signatures():
signatures = [{"Bundle": {"Payload": {"logIndex": 1}}}]
assert get_log_index_from_signatures(signatures) == 1
def test_get_log_index_from_signatures_empty():
signatures = []
assert get_log_index_from_signatures(signatures) == 0
def test_get_log_index_from_malformed_signatures():
signatures = [{"Bundle": {"Payload": {"logIndex": "foo"}}}]
assert get_log_index_from_signatures(signatures) == 0
def test_get_log_index_from_missing_log_index():
signatures = [{"Bundle": {"Payload": {}}}]
assert get_log_index_from_signatures(signatures) == 0
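Taken together, these four tests specify the helper completely: return the highest logIndex across all signatures, and fall back to 0 for empty lists, missing keys, or non-integer values. A sketch that satisfies them:

def get_log_index_from_signatures_sketch(signatures: list) -> int:
    index = 0
    for signature in signatures:
        # Missing keys and non-integer values are treated as "no index".
        log_index = signature.get("Bundle", {}).get("Payload", {}).get("logIndex", 0)
        if isinstance(log_index, int) and log_index > index:
            index = log_index
    return index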
def test_upgrade_container_image_if_already_up_to_date(mocker):
mocker.patch(
"dangerzone.updater.signatures.is_update_available", return_value=(False, None)
)
with pytest.raises(errors.ImageAlreadyUpToDate):
upgrade_container_image(
"ghcr.io/freedomofpress/dangerzone/dangerzone", "sha256:123456", "test.pub"
)
def test_upgrade_container_without_signatures(mocker):
mocker.patch(
"dangerzone.updater.signatures.is_update_available",
return_value=(True, "sha256:123456"),
)
mocker.patch("dangerzone.updater.signatures.get_remote_signatures", return_value=[])
with pytest.raises(errors.SignatureVerificationError):
upgrade_container_image(
"ghcr.io/freedomofpress/dangerzone/dangerzone",
"sha256:123456",
"test.pub",
)
def test_upgrade_container_lower_log_index(mocker):
image_digest = "4da441235e84e93518778827a5c5745d532d7a4079886e1647924bee7ef1c14d"
signatures = load_and_verify_signatures(
image_digest,
TEST_PUBKEY_PATH,
bypass_verification=True,
signatures_path=VALID_SIGNATURES_PATH,
)
mocker.patch(
"dangerzone.updater.signatures.is_update_available",
return_value=(
True,
image_digest,
),
)
mocker.patch(
"dangerzone.updater.signatures.get_remote_signatures",
return_value=signatures,
)
# Mock to avoid losing time on test failures
mocker.patch("dangerzone.container_utils.container_pull")
# The log index of the incoming signatures is 168652066
mocker.patch(
"dangerzone.updater.signatures.get_last_log_index",
return_value=168652067,
)
with pytest.raises(errors.InvalidLogIndex):
upgrade_container_image(
"ghcr.io/freedomofpress/dangerzone/dangerzone",
image_digest,
TEST_PUBKEY_PATH,
)
def test_prepare_airgapped_archive_requires_digest():
with pytest.raises(errors.AirgappedImageDownloadError):
prepare_airgapped_archive(
"ghcr.io/freedomofpress/dangerzone/dangerzone", "test.tar"
)
def test_get_remote_signatures_error(fp: FakeProcess, mocker):
image = "ghcr.io/freedomofpress/dangerzone/dangerzone"
digest = "123456"
mocker.patch("dangerzone.updater.cosign.ensure_installed", return_value=True)
fp.register_subprocess(
["cosign", "download", "signature", f"{image}@sha256:{digest}"], returncode=1
)
with pytest.raises(errors.NoRemoteSignatures):
get_remote_signatures(image, digest)
def test_get_remote_signatures_empty(fp: FakeProcess, mocker):
image = "ghcr.io/freedomofpress/dangerzone/dangerzone"
digest = "123456"
mocker.patch("dangerzone.updater.cosign.ensure_installed", return_value=True)
fp.register_subprocess(
["cosign", "download", "signature", f"{image}@sha256:{digest}"],
stdout=json.dumps({}),
)
with pytest.raises(errors.NoRemoteSignatures):
get_remote_signatures(image, digest)
def test_get_remote_signatures_cosign_error(mocker, fp: FakeProcess):
image = "ghcr.io/freedomofpress/dangerzone/dangerzone"
digest = "123456"
mocker.patch("dangerzone.updater.cosign.ensure_installed", return_value=True)
fp.register_subprocess(
["cosign", "download", "signature", f"{image}@sha256:{digest}"],
returncode=1,
stderr="Error: no signatures associated",
)
with pytest.raises(errors.NoRemoteSignatures):
get_remote_signatures(image, digest)
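All three tests register the same fake subprocess, which is the cosign invocation the helper is expected to make: a non-zero exit or an effectively empty signature list should both surface as NoRemoteSignatures. A rough sketch of that flow (the error types are the project's; the raises here are placeholders):

import json
import subprocess

def get_remote_signatures_sketch(image: str, digest: str) -> list:
    process = subprocess.run(
        ["cosign", "download", "signature", f"{image}@sha256:{digest}"],
        capture_output=True,
        check=False,
    )
    if process.returncode != 0:
        raise RuntimeError("cosign failed")  # errors.NoRemoteSignatures upstream
    # cosign prints one JSON object per line.
    signatures = [
        json.loads(line) for line in process.stdout.splitlines() if line.strip()
    ]
    if not any(signatures):
        raise RuntimeError("no signatures")  # errors.NoRemoteSignatures upstream
    return signatures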
def test_store_signatures_with_different_digests(
valid_signature, signature_other_digest, mocker, tmp_path
):
"""Test that store_signatures raises an error when a signature's digest doesn't match."""
signatures = [valid_signature, signature_other_digest]
image_digest = "sha256:123456"
# Mock the signatures path
signatures_path = tmp_path / "signatures"
signatures_path.mkdir()
mocker.patch("dangerzone.updater.signatures.SIGNATURES_PATH", signatures_path)
# Mock get_log_index_from_signatures
mocker.patch(
"dangerzone.updater.signatures.get_log_index_from_signatures",
return_value=100,
)
# Mock get_last_log_index
mocker.patch(
"dangerzone.updater.signatures.get_last_log_index",
return_value=50,
)
# Call store_signatures
with pytest.raises(errors.SignatureMismatch):
store_signatures(signatures, image_digest, TEST_PUBKEY_PATH)
# Verify that the signatures file was not created
assert not (signatures_path / f"{image_digest}.json").exists()
# Verify that the log index file was not updated
assert not (signatures_path / "last_log_index").exists()
def test_stores_signatures_updates_last_log_index(valid_signature, mocker, tmp_path):
"""Test that store_signatures updates the last log index file."""
signatures = [valid_signature]
# Extract the digest from the signature
image_digest = Signature(valid_signature).manifest_digest
# Mock the signatures path
signatures_path = tmp_path / "signatures"
signatures_path.mkdir()
mocker.patch("dangerzone.updater.signatures.SIGNATURES_PATH", signatures_path)
# Create an existing last_log_index file with a lower value
with open(signatures_path / "last_log_index", "w") as f:
f.write("50")
# Mock get_log_index_from_signatures to return a higher value
mocker.patch(
"dangerzone.updater.signatures.get_log_index_from_signatures",
return_value=100,
)
# Call store_signatures
store_signatures(signatures, image_digest, TEST_PUBKEY_PATH)
# Verify that the log index file was updated
assert (signatures_path / "last_log_index").exists()
with open(signatures_path / "last_log_index", "r") as f:
assert f.read() == "100"
def test_is_update_available_when_no_local_image(mocker):
"""
Test that is_update_available returns True when no local image is
currently present.
"""
# Mock container_image_exists to return False
mocker.patch(
"dangerzone.container_utils.get_local_image_digest",
side_effect=dzerrors.ImageNotPresentException,
)
# Mock get_manifest_digest to return a digest
mocker.patch(
"dangerzone.updater.registry.get_manifest_digest",
return_value=RANDOM_DIGEST,
)
# Call is_update_available
update_available, digest = is_update_available("ghcr.io/freedomofpress/dangerzone")
# Verify the result
assert update_available is True
assert digest == RANDOM_DIGEST
def test_verify_signature(valid_signature):
"""Test that verify_signature raises an error when the payload digest doesn't match."""
verify_signature(
valid_signature,
Signature(valid_signature).manifest_digest,
TEST_PUBKEY_PATH,
)
def test_verify_signature_tempered(tempered_signature):
"""Test that verify_signature raises an error when the payload digest doesn't match."""
# Call verify_signature and expect an error
with pytest.raises(errors.SignatureError):
verify_signature(
tempered_signature,
Signature(tempered_signature).manifest_digest,
TEST_PUBKEY_PATH,
)
def test_verify_signatures_empty_list():
with pytest.raises(errors.SignatureVerificationError):
verify_signatures([], "1234", TEST_PUBKEY_PATH)