Mirror of https://github.com/freedomofpress/dangerzone.git (synced 2025-05-19 11:40:36 +02:00)

Compare commits: 4 commits, 12aafa2606 ... aedfc3b9a2

Commits in this comparison:
- aedfc3b9a2
- 97d7b52093
- 9c2d7a7f7b
- 8ae4af8698
8 changed files with 243 additions and 93 deletions

@@ -3,7 +3,7 @@ import logging
 import platform
 import shutil
 import subprocess
-from typing import List, Tuple
+from typing import List, Optional, Tuple
 
 from . import errors
 from .util import get_resource_path, get_subprocess_startupinfo
@@ -155,6 +155,9 @@ def load_image_tarball_file(tarball_path: str) -> None:
 
 
 def tag_image_by_digest(digest: str, tag: str) -> None:
+    """Tag a container image by digest.
+    The sha256: prefix should be omitted from the digest.
+    """
     image_id = get_image_id_by_digest(digest)
     cmd = [get_runtime(), "tag", image_id, tag]
     log.debug(" ".join(cmd))
@@ -162,11 +165,14 @@ def tag_image_by_digest(digest: str, tag: str) -> None:
 
 
 def get_image_id_by_digest(digest: str) -> str:
+    """Get an image ID from a digest.
+    The sha256: prefix should be omitted from the digest.
+    """
     cmd = [
         get_runtime(),
         "images",
         "-f",
-        f"digest={digest}",
+        f"digest=sha256:{digest}",
         "--format",
         "{{.Id}}",
     ]
@@ -174,7 +180,8 @@ def get_image_id_by_digest(digest: str) -> str:
     process = subprocess.run(
         cmd, startupinfo=get_subprocess_startupinfo(), check=True, capture_output=True
     )
-    return process.stdout.decode().strip()
+    # In case we have multiple lines, we only want the first one.
+    return process.stdout.decode().strip().split("\n")[0]
 
 
 def container_pull(image: str) -> bool:
@@ -185,10 +192,14 @@ def container_pull(image: str) -> bool:
     return process.returncode == 0
 
 
-def get_local_image_hash(image: str) -> str:
+def get_local_image_hash(image: str) -> Optional[str]:
     """
     Returns a image hash from a local image name
     """
     cmd = [get_runtime_name(), "image", "inspect", image, "-f", "{{.Digest}}"]
-    result = subprocess.run(cmd, capture_output=True, check=True)
-    return result.stdout.strip().decode().strip("sha256:")
+    try:
+        result = subprocess.run(cmd, capture_output=True, check=True)
+    except subprocess.CalledProcessError as e:
+        return None
+    else:
+        return result.stdout.strip().decode().strip("sha256:")
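
The hunks above appear to come from dangerzone/container_utils.py: get_local_image_hash() now returns Optional[str] instead of letting the runtime error propagate, and the digest helpers document that the sha256: prefix must be omitted. A minimal caller sketch under those assumptions (the image and tag names are illustrative):

```python
# Sketch only: assumes the container_utils module shown above is importable as
# dangerzone.container_utils and that a container runtime (podman/docker) is installed.
from dangerzone import container_utils as runtime

image = "ghcr.io/freedomofpress/dangerzone/dangerzone"  # illustrative

digest = runtime.get_local_image_hash(image)
if digest is None:
    # The image is not present locally; the CalledProcessError is now swallowed.
    runtime.container_pull(image)
else:
    # The digest comes back without the sha256: prefix, which is what
    # tag_image_by_digest() and get_image_id_by_digest() expect.
    runtime.tag_image_by_digest(digest, "dangerzone.rocks/dangerzone:latest")  # illustrative tag
```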

@@ -1,17 +1,17 @@
 import subprocess
 from tempfile import NamedTemporaryFile
 
-from . import utils
+from . import cosign
 
 
-def verify_attestation(
+def verify(
     manifest: bytes, attestation_bundle: bytes, image_tag: str, expected_repo: str
 ) -> bool:
     """
     Look up the image attestation to see if the image has been built
     on Github runners, and from a given repository.
     """
-    utils.ensure_cosign()
+    cosign.ensure_installed()
 
     # Put the value in files and verify with cosign
     with (
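
This file is presumably dangerzone/updater/attestations.py: verify_attestation() becomes verify() and the cosign presence check moves to the new cosign module. A hedged sketch of how the renamed function is fed from the registry helpers (mirroring the attest-provenance command further down; the image name is illustrative):

```python
# Sketch only: assumes the dangerzone.updater package laid out as in this diff.
from dangerzone.updater import attestations, registry

image = "ghcr.io/freedomofpress/dangerzone/dangerzone:latest"  # illustrative
parsed = registry.parse_image_location(image)
manifest, bundle = registry.get_attestation(image)

# True when the attestation proves the image was built by GitHub runners
# from the expected source repository.
if attestations.verify(manifest, bundle, parsed.tag, "freedomofpress/dangerzone"):
    print(f"Provenance verified for {parsed.full_name}")
```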

@@ -5,16 +5,10 @@ import logging
 import click
 
 from ..util import get_resource_path
-from . import errors, log, registry
-from .attestations import verify_attestation
-from .signatures import (
-    upgrade_container_image,
-    upgrade_container_image_airgapped,
-    verify_offline_image_signature,
-)
+from . import attestations, errors, log, registry, signatures
 
 DEFAULT_REPOSITORY = "freedomofpress/dangerzone"
-DEFAULT_IMAGE_NAME = "ghcr.io/freedomofpress/dangerzone"
+DEFAULT_IMAGE_NAME = "ghcr.io/freedomofpress/dangerzone/dangerzone"
 PUBKEY_DEFAULT_LOCATION = get_resource_path("freedomofpress-dangerzone-pub.key")
 
 
@@ -30,14 +24,18 @@ def main(debug: bool) -> None:
 
 
 @main.command()
-@click.argument("image")
+@click.argument("image", default=DEFAULT_IMAGE_NAME)
 @click.option("--pubkey", default=PUBKEY_DEFAULT_LOCATION)
 def upgrade(image: str, pubkey: str) -> None:
     """Upgrade the image to the latest signed version."""
     manifest_hash = registry.get_manifest_hash(image)
     try:
-        is_upgraded = upgrade_container_image(image, manifest_hash, pubkey)
-        click.echo(f"✅ The local image {image} has been upgraded")
+        is_upgraded = signatures.upgrade_container_image(image, manifest_hash, pubkey)
+        if is_upgraded:
+            click.echo(f"✅ The local image {image} has been upgraded")
+            click.echo(f"✅ The image has been signed with {pubkey}")
+            click.echo(f"✅ Signatures has been verified and stored locally")
+
     except errors.ImageAlreadyUpToDate as e:
         click.echo(f"✅ {e}")
         raise click.Abort()
@@ -46,12 +44,15 @@ def upgrade(image: str, pubkey: str) -> None:
 @main.command()
 @click.argument("image_filename")
 @click.option("--pubkey", default=PUBKEY_DEFAULT_LOCATION)
-@click.option("--image-name", default=DEFAULT_IMAGE_NAME)
-def upgrade_airgapped(image_filename: str, pubkey: str, image_name: str) -> None:
-    """Upgrade the image to the latest signed version."""
+def load_archive(image_filename: str, pubkey: str) -> None:
+    """Upgrade the local image to the one in the archive."""
     try:
-        upgrade_container_image_airgapped(image_filename, pubkey, image_name)
-        click.echo(f"✅ Installed image {image_filename} on the system")
+        loaded_image = signatures.upgrade_container_image_airgapped(
+            image_filename, pubkey
+        )
+        click.echo(
+            f"✅ Installed image {image_filename} on the system as {loaded_image}"
+        )
     except errors.ImageAlreadyUpToDate as e:
         click.echo(f"✅ {e}")
         raise click.Abort()
@@ -59,13 +60,22 @@ def upgrade_airgapped(image_filename: str, pubkey: str, image_name: str) -> None
 
 @main.command()
 @click.argument("image")
+@click.option("--output", default="dangerzone-airgapped.tar")
+def prepare_archive(image: str, output: str) -> None:
+    """Prepare an archive to upgrade the dangerzone image on an airgapped environment."""
+    signatures.prepare_airgapped_archive(image, output)
+    click.echo(f"✅ Archive {output} created")
+
+
+@main.command()
+@click.argument("image", default=DEFAULT_IMAGE_NAME)
 @click.option("--pubkey", default=PUBKEY_DEFAULT_LOCATION)
-def verify_offline(image: str, pubkey: str) -> None:
+def verify_local(image: str, pubkey: str) -> None:
     """
     Verify the local image signature against a public key and the stored signatures.
     """
     # XXX remove a potentiel :tag
-    if verify_offline_image_signature(image, pubkey):
+    if signatures.verify_local_image(image, pubkey):
         click.echo(
             (
                 f"Verifying the local image:\n\n"
@@ -79,6 +89,7 @@ def verify_offline(image: str, pubkey: str) -> None:
 @main.command()
 @click.argument("image")
 def list_remote_tags(image: str) -> None:
+    """List the tags available for a given image."""
     click.echo(f"Existing tags for {image}")
     for tag in registry.list_tags(image):
         click.echo(tag)
@@ -87,6 +98,7 @@ def list_remote_tags(image: str) -> None:
 @main.command()
 @click.argument("image")
 def get_manifest(image: str) -> None:
+    """Retrieves a remove manifest for a given image and displays it."""
     click.echo(registry.get_manifest(image))
 
 
@@ -109,7 +121,7 @@ def attest_provenance(image: str, repository: str) -> None:
     parsed = registry.parse_image_location(image)
     manifest, bundle = registry.get_attestation(image)
 
-    verified = verify_attestation(manifest, bundle, parsed.tag, repository)
+    verified = attestations.verify(manifest, bundle, parsed.tag, repository)
     if verified:
         click.echo(
             f"🎉 The image available at `{parsed.full_name}` has been built by Github Runners from the `{repository}` repository"
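
Taken together, the CLI changes rename upgrade-airgapped/verify-offline to load-archive/verify-local, add a prepare-archive command, and give upgrade and verify-local a default image. A hedged way to poke at the resulting command set without touching the network, assuming the module shown here lives at dangerzone.updater.cli:

```python
# Sketch only: drives the click group from the diff above with click's test runner.
from click.testing import CliRunner

from dangerzone.updater.cli import main  # assumed module path

runner = CliRunner()
# Click derives command names from the function names (underscores become dashes),
# so the group exposes: upgrade, load-archive, prepare-archive, verify-local,
# list-remote-tags, get-manifest and attest-provenance.
print(runner.invoke(main, ["--help"]).output)
print(runner.invoke(main, ["load-archive", "--help"]).output)
```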

dangerzone/updater/cosign.py (new file, 32 lines)

@@ -0,0 +1,32 @@
+import subprocess
+
+from . import errors, log
+
+
+def ensure_installed() -> None:
+    try:
+        subprocess.run(["cosign", "version"], capture_output=True, check=True)
+    except subprocess.CalledProcessError:
+        raise errors.CosignNotInstalledError()
+
+
+def verify_local_image(oci_image_folder: str, pubkey: str) -> bool:
+    """Verify the given path against the given public key"""
+
+    ensure_installed()
+    cmd = [
+        "cosign",
+        "verify",
+        "--key",
+        pubkey,
+        "--offline",
+        "--local-image",
+        oci_image_folder,
+    ]
+    log.debug(" ".join(cmd))
+    result = subprocess.run(cmd, capture_output=True)
+    if result.returncode == 0:
+        log.info("Signature verified")
+        return True
+    log.info("Failed to verify signature", result.stderr)
+    return False
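
The new cosign module is a thin wrapper around the cosign CLI: ensure_installed() probes `cosign version`, and verify_local_image() runs an offline `cosign verify --local-image` against an OCI layout directory. A minimal, hedged usage sketch (both paths are placeholders; cosign must be on PATH):

```python
# Sketch only: exercises the new dangerzone.updater.cosign helpers.
from dangerzone.updater import cosign, errors

try:
    cosign.ensure_installed()  # raises CosignNotInstalledError when cosign is missing
except errors.CosignNotInstalledError:
    raise SystemExit("cosign is required; please install it first")

# An extracted OCI image directory (e.g. produced by `cosign save --dir ...`)
# and the project's public key; both paths are placeholders.
ok = cosign.verify_local_image("/tmp/dangerzone-oci", "freedomofpress-dangerzone-pub.key")
print("signature verified" if ok else "verification failed")
```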

@@ -6,6 +6,10 @@ class ImageAlreadyUpToDate(UpdaterError):
     pass
 
 
+class ImageNotFound(UpdaterError):
+    pass
+
+
 class SignatureError(UpdaterError):
     pass
 
@@ -14,6 +18,10 @@ class RegistryError(UpdaterError):
     pass
 
 
+class AirgappedImageDownloadError(UpdaterError):
+    pass
+
+
 class NoRemoteSignatures(SignatureError):
     pass
 
@@ -22,6 +30,10 @@ class SignatureVerificationError(SignatureError):
     pass
 
 
+class SignatureExtractionError(SignatureError):
+    pass
+
+
 class SignaturesFolderDoesNotExist(SignatureError):
     pass
 

@@ -3,7 +3,7 @@ import platform
 import re
 import subprocess
 import tarfile
-from base64 import b64decode
+from base64 import b64decode, b64encode
 from hashlib import sha256
 from io import BytesIO
 from pathlib import Path
@@ -11,7 +11,7 @@ from tempfile import NamedTemporaryFile, TemporaryDirectory
 from typing import Dict, List, Optional, Tuple
 
 from .. import container_utils as runtime
-from . import errors, log, registry, utils
+from . import cosign, errors, log, registry
 
 try:
     import platformdirs
@@ -55,39 +55,22 @@ def signature_to_bundle(sig: Dict) -> Dict:
     }
 
 
-def cosign_verify_local_image(oci_image_folder: str, pubkey: str) -> bool:
-    """Verify the given path against the given public key"""
-
-    utils.ensure_cosign()
-    cmd = [
-        "cosign",
-        "verify",
-        "--key",
-        pubkey,
-        "--offline",
-        "--local-image",
-        oci_image_folder,
-    ]
-    log.debug(" ".join(cmd))
-    result = subprocess.run(cmd, capture_output=True)
-    if result.returncode == 0:
-        log.debug("Signature verified")
-        return True
-    log.debug("Failed to verify signature", result.stderr)
-    return False
-
-
 def verify_signature(signature: dict, image_hash: str, pubkey: str) -> bool:
     """Verify a signature against a given public key"""
     # XXX - Also verfy the identity/docker-reference field against the expected value
     # e.g. ghcr.io/freedomofpress/dangerzone/dangerzone
 
-    utils.ensure_cosign()
+    cosign.ensure_installed()
     signature_bundle = signature_to_bundle(signature)
 
     payload_bytes = b64decode(signature_bundle["Payload"])
-    if json.loads(payload_bytes)["critical"]["type"] != f"sha256:{image_hash}":
-        raise errors.SignatureMismatch("The signature does not match the image hash")
+    payload_hash = json.loads(payload_bytes)["critical"]["image"][
+        "docker-manifest-digest"
+    ]
+    if payload_hash != f"sha256:{image_hash}":
+        raise errors.SignatureMismatch(
+            f"The signature does not match the image hash ({payload_hash}, {image_hash})"
+        )
 
     with (
         NamedTemporaryFile(mode="w") as signature_file,
@@ -156,37 +139,56 @@ def upgrade_container_image(image: str, manifest_hash: str, pubkey: str) -> bool
     return runtime.container_pull(image)
 
 
-def upgrade_container_image_airgapped(
-    container_tar: str, pubkey: str, image_name: str
-) -> bool:
+def _get_blob(tmpdir: str, hash: str) -> Path:
+    return Path(tmpdir) / "blobs" / "sha256" / hash.replace("sha256:", "")
+
+
+def upgrade_container_image_airgapped(container_tar: str, pubkey: str) -> str:
     """
     Verify the given archive against its self-contained signatures, then
     upgrade the image and retag it to the expected tag.
 
     Right now, the archive is extracted and reconstructed, requiring some space
     on the filesystem.
+
+    :return: The loaded image name
     """
 
     # XXX Use a memory buffer instead of the filesystem
     with TemporaryDirectory() as tmpdir:
+
+        def _get_signature_filename(manifests: List[Dict]) -> Path:
+            for manifest in manifests:
+                if (
+                    manifest["annotations"].get("kind")
+                    == "dev.cosignproject.cosign/sigs"
+                ):
+                    return _get_blob(tmpdir, manifest["digest"])
+            raise errors.SignatureExtractionError()
+
         with tarfile.open(container_tar, "r") as archive:
             archive.extractall(tmpdir)
 
         # XXX Check if the contained signatures match the given ones?
         # Or maybe store both signatures?
-        if not cosign_verify_local_image(tmpdir, pubkey):
+        if not cosign.verify_local_image(tmpdir, pubkey):
             raise errors.SignatureVerificationError()
 
         # Remove the signatures from the archive.
         with open(Path(tmpdir) / "index.json") as f:
             index_json = json.load(f)
-        index_json["manifests"] = [
-            manifest
-            for manifest in index_json["manifests"]
-            if manifest["annotations"].get("kind")
-            != "dev.cosignproject.cosign/sigs"
-        ]
-
-        image_digest = index_json["manifests"][0].get("digest")
+        signature_filename = _get_signature_filename(index_json["manifests"])
+
+        index_json["manifests"] = [
+            manifest
+            for manifest in index_json["manifests"]
+            if manifest["annotations"].get("kind") != "dev.cosignproject.cosign/sigs"
+        ]
+
+        with open(signature_filename, "rb") as f:
+            image_name, signatures = convert_oci_images_signatures(json.load(f), tmpdir)
+        log.info(f"Found image name: {image_name}")
+
+        image_digest = index_json["manifests"][0].get("digest").replace("sha256:", "")
 
         # Write the new index.json to the temp folder
         with open(Path(tmpdir) / "index.json", "w") as f:
@@ -202,13 +204,40 @@ def upgrade_container_image_airgapped(
             runtime.load_image_tarball_file(temporary_tar.name)
             runtime.tag_image_by_digest(image_digest, image_name)
 
-        # XXX Convert the signatures to the expected format
+        store_signatures(signatures, image_digest, pubkey)
+        return image_name
 
-        # At this point, the signatures are verified
-        # We store the signatures just now to avoid storing unverified signatures
-        # store_signatures(signatures, image_hash, pubkey)
-
-        return True
+
+def convert_oci_images_signatures(
+    signatures_manifest: List[Dict], tmpdir: str
+) -> (str, List[Dict]):
+    def _to_cosign_signature(layer: Dict) -> Dict:
+        signature = layer["annotations"]["dev.cosignproject.cosign/signature"]
+        bundle = json.loads(layer["annotations"]["dev.sigstore.cosign/bundle"])
+        payload_body = json.loads(b64decode(bundle["Payload"]["body"]))
+
+        payload_location = _get_blob(tmpdir, layer["digest"])
+        with open(payload_location, "rb") as f:
+            payload_b64 = b64encode(f.read()).decode()
+
+        return {
+            "Base64Signature": payload_body["spec"]["signature"]["content"],
+            "Payload": payload_b64,
+            "Cert": None,
+            "Chain": None,
+            "Bundle": bundle,
+            "RFC3161Timestamp": None,
+        }
+
+    layers = signatures_manifest["layers"]
+    signatures = [_to_cosign_signature(layer) for layer in layers]
+
+    payload_location = _get_blob(tmpdir, layers[0]["digest"])
+    with open(payload_location, "r") as f:
+        payload = json.load(f)
+        image_name = payload["critical"]["identity"]["docker-reference"]
+
+    return image_name, signatures
 
 
 def get_file_hash(file: Optional[str] = None, content: Optional[bytes] = None) -> str:
@@ -268,24 +297,30 @@ def store_signatures(signatures: list[Dict], image_hash: str, pubkey: str) -> No
         raise errors.InvalidSignatures("Signatures do not share the same image hash")
 
     if f"sha256:{image_hash}" != hashes[0]:
-        raise errors.SignatureMismatch("Signatures do not match the given image hash")
+        raise errors.SignatureMismatch(
+            f"Signatures do not match the given image hash ({image_hash}, {hashes[0]})"
+        )
 
     pubkey_signatures = SIGNATURES_PATH / get_file_hash(pubkey)
     pubkey_signatures.mkdir(exist_ok=True)
 
     with open(pubkey_signatures / f"{image_hash}.json", "w") as f:
-        log.debug(
+        log.info(
             f"Storing signatures for {image_hash} in {pubkey_signatures}/{image_hash}.json"
         )
         json.dump(signatures, f)
 
 
-def verify_offline_image_signature(image: str, pubkey: str) -> bool:
+def verify_local_image(image: str, pubkey: str) -> bool:
     """
     Verifies that a local image has a valid signature
     """
     log.info(f"Verifying local image {image} against pubkey {pubkey}")
-    image_hash = runtime.get_local_image_hash(image)
+    try:
+        image_hash = runtime.get_local_image_hash(image)
+    except subprocess.CalledProcessError:
+        raise errors.ImageNotFound(f"The image {image} does not exist locally")
+
     log.debug(f"Image hash: {image_hash}")
     signatures = load_signatures(image_hash, pubkey)
     if len(signatures) < 1:
@@ -300,7 +335,7 @@ def verify_offline_image_signature(image: str, pubkey: str) -> bool:
 
 def get_remote_signatures(image: str, hash: str) -> List[Dict]:
     """Retrieve the signatures from the registry, via `cosign download`."""
-    utils.ensure_cosign()
+    cosign.ensure_installed()
 
     process = subprocess.run(
         ["cosign", "download", "signature", f"{image}@sha256:{hash}"],
@@ -315,3 +350,28 @@ def get_remote_signatures(image: str, hash: str) -> List[Dict]:
     if len(signatures) < 1:
         raise errors.NoRemoteSignatures("No signatures found for the image")
     return signatures
+
+
+def prepare_airgapped_archive(image_name, destination):
+    if "@sha256:" not in image_name:
+        raise errors.AirgappedImageDownloadError(
+            "The image name must include a digest, e.g. ghcr.io/freedomofpress/dangerzone/dangerzone@sha256:123456"
+        )
+
+    cosign.ensure_installed()
+    # Get the image from the registry
+
+    with TemporaryDirectory() as tmpdir:
+        msg = f"Downloading image {image_name}. \nIt might take a while."
+        log.info(msg)
+
+        process = subprocess.run(
+            ["cosign", "save", image_name, "--dir", tmpdir],
+            capture_output=True,
+            check=True,
+        )
+        if process.returncode != 0:
+            raise errors.AirgappedImageDownloadError()
+
+        with tarfile.open(destination, "w") as archive:
+            archive.add(tmpdir, arcname=".")
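
The signatures module now drives the whole air-gapped path: prepare_airgapped_archive() saves the image plus its cosign signatures with `cosign save`, and upgrade_container_image_airgapped() verifies the archive, strips the signature manifests, loads and retags the image, stores the now-verified signatures, and returns the loaded image name. A hedged end-to-end sketch using only functions that appear in this diff (paths and the digest are placeholders):

```python
# Sketch only: assumes the dangerzone.updater package from this diff plus a
# container runtime and cosign on both machines.
from dangerzone.updater import signatures

pubkey = "freedomofpress-dangerzone-pub.key"  # placeholder path to the shipped key

# Connected machine: bundle the pinned image and its signatures into a tarball.
signatures.prepare_airgapped_archive(
    "ghcr.io/freedomofpress/dangerzone/dangerzone@sha256:<digest>",  # placeholder digest
    "dangerzone-airgapped.tar",
)

# Air-gapped machine: verify, load, retag, and store the signatures locally.
loaded_image = signatures.upgrade_container_image_airgapped(
    "dangerzone-airgapped.tar", pubkey
)

# The local image can then be re-checked offline at any point.
assert signatures.verify_local_image(loaded_image, pubkey)
```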

@@ -1,10 +0,0 @@
-import subprocess
-
-from . import errors
-
-
-def ensure_cosign() -> None:
-    try:
-        subprocess.run(["cosign", "version"], capture_output=True, check=True)
-    except subprocess.CalledProcessError:
-        raise errors.CosignNotInstalledError()

@@ -1,19 +1,19 @@
 # Independent Container Updates
 
 Since version 0.9.0, Dangerzone is able to ship container images independently
-from issuing a new release of the software.
+from releases.
 
-This is useful as images need to be kept updated with the latest security fixes.
+One of the main benefits of doing so is to lower the time needed to patch security issues inside the containers.
 
-## Nightly images and attestations
+## Checking attestations
 
-Each night, new images are built and pushed to our container registry, alongside
+Each night, new images are built and pushed to the container registry, alongside
 with a provenance attestation, enabling anybody to ensure that the image has
 been originally built by Github CI runners, from a defined source repository (in our case `freedomofpress/dangerzone`).
 
 To verify the attestations against our expectations, use the following command:
 ```bash
-poetry run ./dev_scripts/registry.py attest ghcr.io/freedomofpress/dangerzone/dangerzone:latest --repo freedomofpress/dangerzone
+dangerzone-image attest-provenance ghcr.io/freedomofpress/dangerzone/dangerzone --repository freedomofpress/dangerzone
 ```
 
 In case of sucess, it will report back:
@@ -21,3 +21,36 @@ In case of sucess, it will report back:
 ```
 🎉 The image available at `ghcr.io/freedomofpress/dangerzone/dangerzone:latest` has been built by Github runners from the `freedomofpress/dangerzone` repository.
 ```
+
+## Install updates
+
+To check if a new container image has been released, and update your local installation with it, you can use the following commands:
+
+```bash
+./dev_scripts/dangerzone-image --debug upgrade ghcr.io/almet/dangerzone/dangerzone
+```
+
+## Verify local
+
+You can verify that the image you have locally matches the stored signatures, and that these have been signed with a trusted public key:
+
+```bash
+dangerzone-image verify-local ghcr.io/almet/dangerzone/dangerzone
+```
+
+## Air-gapped environments
+
+In order to make updates on an air-gapped environment, you will need to prepare an archive for the air-gapped environment. This archive will contain all the needed material to validate that the new container image has been signed and is valid.
+
+On the machine on which you prepare the packages:
+
+```bash
+dangerzone-image prepare-archive --output dz-fa94872.tar ghcr.io/almet/dangerzone/dangerzone@sha256:fa948726aac29a6ac49f01ec8fbbac18522b35b2491fdf716236a0b3502a2ca7
+```
+
+On the airgapped machine, copy the file and run the following command:
+
+```bash
+dangerzone-image load-archive dz-fa94872.tar
+```