Mirror of https://github.com/freedomofpress/dangerzone.git
Synced 2025-04-28 09:52:37 +02:00

Merge db9652742a into 83be5fb151
Commit 7135910ad8

52 changed files with 2594 additions and 510 deletions
.github/workflows/build-push-image.yml (vendored, 44 lines changed)
@@ -15,11 +15,21 @@ on:
       reproduce:
         required: true
         type: boolean
+      sign:
+        required: true
+        type: boolean
+      key_name:
+        required: false
+        type: string
+        default: "dangerzone-tests"
+      key_cache:
+        required: false
+        type: string
+        default: "v1-keypair-${{ github.ref_name }}" # unique for the branch / PR
     secrets:
       registry_token:
         required: true


 jobs:
   lint:
     runs-on: ubuntu-latest
@@ -44,6 +54,7 @@ jobs:
       debian_archive_date: ${{ steps.params.outputs.debian_archive_date }}
       source_date_epoch: ${{ steps.params.outputs.source_date_epoch }}
       image: ${{ steps.params.outputs.full_image_name }}
+      tag: ${{ steps.params.outputs.tag }}
     steps:
       - uses: actions/checkout@v4
         with:
@@ -73,6 +84,7 @@ jobs:
       debian_archive_date: ${{ needs.prepare.outputs.debian_archive_date }}
       source_date_epoch: ${{ needs.prepare.outputs.source_date_epoch }}
       image: ${{ needs.prepare.outputs.image }}
+      tag: ${{ needs.prepare.outputs.tag }}
     strategy:
       fail-fast: false
       matrix:
@@ -140,6 +152,7 @@ jobs:
       debian_archive_date: ${{ needs.build.outputs.debian_archive_date }}
       source_date_epoch: ${{ needs.build.outputs.source_date_epoch }}
       image: ${{ needs.build.outputs.image }}
+      tag: ${{ needs.build.outputs.tag }}
       digest_root: ${{ steps.image.outputs.digest_root }}
       digest_amd64: ${{ steps.image.outputs.digest_amd64 }}
       digest_arm64: ${{ steps.image.outputs.digest_arm64 }}
@@ -246,3 +259,32 @@ jobs:
           --platform \
           linux/${{ matrix.platform.name }} \
           ${{ needs.merge.outputs[format('digest_{0}', matrix.platform.name)] }}
+
+  sign:
+    if: ${{ inputs.sign }}
+    runs-on: "ubuntu-latest"
+    env:
+      COSIGN_PASSWORD: "password"
+      COSIGN_YES: true
+    needs:
+      - merge
+    # outputs: add signature location ?
+    steps:
+      - name: Install Cosign
+        uses: sigstore/cosign-installer@d7d6bc7722e3daa8354c50bcb52f4837da5e9b6a
+        with:
+          cosign-release: 'v2.5.0'
+      - name: Check install
+        run: cosign version
+      - name: Generate keypair
+        run: |-
+          cosign generate-key-pair --output-key-prefix="${{ inputs.key_name }}"
+      - name: Cache keypair
+        uses: actions/cache@v4
+        with:
+          path: "${{ inputs.key_name }}.*"
+          key: ${{ inputs.key_cache }}
+          enableCrossOsArchive: true
+      - name: Sign container
+        run: |-
+          cosign sign --key ${{ inputs.key_name }}.key ${{ inputs.registry }}/${{ inputs.image_name }}:${{ needs.merge.outputs.tag }}@${{ needs.merge.outputs.digest_root }}
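The new sign job signs the merged multi-arch manifest with a cosign keypair that is generated, then cached, per branch. As a minimal sketch of how such a signature could be checked locally, assuming cosign v2.x is on PATH and the matching public key has been exported as a file (both assumptions, not part of this diff):

import subprocess

def cosign_verify(image_ref: str, pubkey_path: str) -> bool:
    """Return True if image_ref carries a signature matching pubkey_path."""
    cmd = ["cosign", "verify", "--key", pubkey_path, image_ref]
    return subprocess.run(cmd, capture_output=True).returncode == 0

# Hypothetical values; the real tag and digest come from the merge job outputs:
# cosign_verify(
#     "ghcr.io/OWNER/dangerzone/dangerzone-staging:TAG@sha256:DIGEST",
#     "dangerzone-tests.pub",
# )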
.github/workflows/ci.yml (vendored, 52 lines changed)
@@ -11,11 +11,10 @@ on:

 permissions:
   packages: write
+  actions: read # for detecting the Github Actions environment.
+  id-token: write # for creating OIDC tokens for signing.

 env:
-  REGISTRY_USER: ${{ github.actor }}
-  REGISTRY_PASSWORD: ${{ github.token }}
-  IMAGE_REGISTRY: ghcr.io/${{ github.repository_owner }}
   QT_SELECT: "qt6"

 # Disable multiple concurrent runs on the same branch
@@ -45,35 +44,18 @@ jobs:
   # This is already built daily by the "build.yml" file
   # But we also want to include this in the checks that run on each push.
   build-container-image:
-    runs-on: ubuntu-24.04
-    steps:
-      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
-
-      - name: Get current date
-        id: date
-        run: echo "date=$(date +'%Y-%m-%d')" >> $GITHUB_OUTPUT
-
-      - name: Cache container image
-        id: cache-container-image
-        uses: actions/cache@v4
-        with:
-          key: v5-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }}
-          path: |-
-            share/container.tar
-            share/image-id.txt
-
-      - name: Build Dangerzone container image
-        if: ${{ steps.cache-container-image.outputs.cache-hit != 'true' }}
-        run: |
-          python3 ./install/common/build-image.py
-
-      - name: Upload container image
-        uses: actions/upload-artifact@v4
-        with:
-          name: container.tar
-          path: share/container.tar
+    name: Build, push and sign container image
+    uses: ./.github/workflows/build-push-image.yml
+    with:
+      registry: "ghcr.io/${{ github.repository_owner }}"
+      registry_user: ${{ github.actor }}
+      image_name: "dangerzone/dangerzone-staging"
+      reproduce: false
+      sign: true
+      key_name: "dangerzone-tests"
+      key_cache: "v1-test-keypair-${{ github.ref_name }}"
+    secrets:
+      registry_token: ${{ secrets.GITHUB_TOKEN }}

   download-tessdata:
     name: Download and cache Tesseract data
@@ -227,9 +209,7 @@ jobs:
         uses: actions/cache/restore@v4
         with:
           key: v5-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }}
-          path: |-
-            share/container.tar
-            share/image-id.txt
+          path: share/container.tar
           fail-on-cache-miss: true

       - name: Build Dangerzone .deb
@@ -336,7 +316,6 @@ jobs:
           key: v5-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }}
           path: |-
             share/container.tar
-            share/image-id.txt
           fail-on-cache-miss: true

       - name: Build Dangerzone .rpm
@@ -433,7 +412,6 @@ jobs:
           key: v5-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }}
           path: |-
             share/container.tar
-            share/image-id.txt
           fail-on-cache-miss: true

       - name: Restore cached tessdata
(unnamed file)
@@ -18,5 +18,6 @@ jobs:
       registry_user: ${{ github.actor }}
       image_name: dangerzone/dangerzone
       reproduce: true
+      sign: false
     secrets:
       registry_token: ${{ secrets.GITHUB_TOKEN }}
(unnamed file)
@@ -35,7 +35,7 @@ RUN \
     apt-get update && \
     apt-get install -y --no-install-recommends \
       python3 python3-fitz libreoffice-nogui libreoffice-java-common \
-      python3 python3-magic default-jre-headless fonts-noto-cjk fonts-dejavu \
+      python3-magic default-jre-headless fonts-noto-cjk fonts-dejavu \
       runsc unzip wget && \
     : "Clean up for improving reproducibility (optional)" && \
     rm -rf /var/cache/fontconfig/ && \
(unnamed file)
@@ -35,7 +35,7 @@ RUN \
     apt-get update && \
     apt-get install -y --no-install-recommends \
       python3 python3-fitz libreoffice-nogui libreoffice-java-common \
-      python3 python3-magic default-jre-headless fonts-noto-cjk fonts-dejavu \
+      python3-magic default-jre-headless fonts-noto-cjk fonts-dejavu \
       runsc unzip wget && \
     : "Clean up for improving reproducibility (optional)" && \
     rm -rf /var/cache/fontconfig/ && \
(unnamed file)
@@ -71,8 +71,8 @@ def cli_main(
 ) -> None:
     setup_logging()
     display_banner()
+    settings = Settings()
     if set_container_runtime:
-        settings = Settings()
         if set_container_runtime == "default":
             settings.unset_custom_runtime()
             click.echo(
@@ -117,7 +117,8 @@ def cli_main(
         sys.exit(1)

     # Ensure container is installed
-    dangerzone.isolation_provider.install()
+    should_upgrade = bool(settings.get("updater_check_all"))
+    dangerzone.isolation_provider.install(should_upgrade)

     # Convert the document
     print_header("Converting document to safe PDF")
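Both this CLI path and the GUI's InstallContainerThread (later in this diff) read the same updater_check_all setting to decide whether install() may fetch a remote image. A minimal sketch of that shared pattern, assuming this branch's Settings and IsolationProvider classes:

from dangerzone.isolation_provider.base import IsolationProvider
from dangerzone.settings import Settings

def install_respecting_prefs(provider: IsolationProvider) -> bool:
    """Install the sandbox image, upgrading only if the user opted in."""
    should_upgrade = bool(Settings().get("updater_check_all"))
    return provider.install(should_upgrade)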
(unnamed file)
@@ -4,13 +4,14 @@ import platform
 import shutil
 import subprocess
 from pathlib import Path
-from typing import List, Optional, Tuple
+from typing import IO, Callable, List, Optional, Tuple

 from . import errors
 from .settings import Settings
 from .util import get_resource_path, get_subprocess_startupinfo

-CONTAINER_NAME = "dangerzone.rocks/dangerzone"
+OLD_CONTAINER_NAME = "dangerzone.rocks/dangerzone"
+CONTAINER_NAME = "ghcr.io/freedomofpress/dangerzone/dangerzone"

 log = logging.getLogger(__name__)

@@ -55,6 +56,11 @@ class Runtime(object):
         return "podman" if platform.system() == "Linux" else "docker"


+def subprocess_run(*args, **kwargs) -> subprocess.CompletedProcess:
+    """subprocess.run with the correct startupinfo for Windows."""
+    return subprocess.run(*args, startupinfo=get_subprocess_startupinfo(), **kwargs)
+
+
 def get_runtime_version(runtime: Optional[Runtime] = None) -> Tuple[int, int]:
     """Get the major/minor parts of the Docker/Podman version.

@@ -74,9 +80,8 @@ def get_runtime_version(runtime: Optional[Runtime] = None) -> Tuple[int, int]:

     cmd = [str(runtime.path), "version", "-f", query]
     try:
-        version = subprocess.run(
+        version = subprocess_run(
             cmd,
-            startupinfo=get_subprocess_startupinfo(),
             capture_output=True,
             check=True,
         ).stdout.decode()
@@ -149,12 +154,6 @@ def delete_image_tag(tag: str) -> None:
     )


-def get_expected_tag() -> str:
-    """Get the tag of the Dangerzone image tarball from the image-id.txt file."""
-    with get_resource_path("image-id.txt").open() as f:
-        return f.read().strip()
-
-
 def load_image_tarball() -> None:
     runtime = Runtime()
     log.info("Installing Dangerzone container image...")
@@ -198,4 +197,90 @@ def load_image_tarball() -> None:
         add_image_tag(bad_tag, good_tag)
         delete_image_tag(bad_tag)

-    log.info("Successfully installed container image")
+
+def tag_image_by_digest(digest: str, tag: str) -> None:
+    """Tag a container image by digest.
+
+    The sha256: prefix should be omitted from the digest.
+    """
+    runtime = Runtime()
+    image_id = get_image_id_by_digest(digest)
+    cmd = [str(runtime.path), "tag", image_id, tag]
+    log.debug(" ".join(cmd))
+    subprocess_run(cmd, check=True)
+
+
+def get_image_id_by_digest(digest: str) -> str:
+    """Get an image ID from a digest.
+
+    The sha256: prefix should be omitted from the digest.
+    """
+    runtime = Runtime()
+    cmd = [
+        str(runtime.path),
+        "images",
+        "-f",
+        f"digest=sha256:{digest}",
+        "--format",
+        "{{.Id}}",
+    ]
+    log.debug(" ".join(cmd))
+    process = subprocess_run(cmd, check=True, capture_output=True)
+    # In case we have multiple lines, we only want the first one.
+    return process.stdout.decode().strip().split("\n")[0]
+
+
+def container_pull(
+    image: str, manifest_digest: str, callback: Optional[Callable] = None
+):
+    """Pull a container image from a registry."""
+    runtime = Runtime()
+    cmd = [str(runtime.path), "pull", f"{image}@sha256:{manifest_digest}"]
+    process = subprocess.Popen(
+        cmd,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.STDOUT,
+        text=True,
+        bufsize=1,
+    )
+
+    if callback:
+        for line in process.stdout:  # type: ignore
+            callback(line)
+
+    process.wait()
+    if process.returncode != 0:
+        raise errors.ContainerPullException(
+            f"Could not pull the container image: {process.returncode}"
+        )
+
+
+def get_local_image_digest(image: str) -> str:
+    """
+    Returns an image hash from a local image name
+    """
+    # Get the image hash from the "podman images" command.
+    # It's not possible to use "podman inspect" here as it
+    # returns the digest of the architecture-bound image
+    runtime = Runtime()
+    cmd = [str(runtime.path), "images", image, "--format", "{{.Digest}}"]
+    log.debug(" ".join(cmd))
+    try:
+        result = subprocess_run(
+            cmd,
+            capture_output=True,
+            check=True,
+        )
+        lines = result.stdout.decode().strip().split("\n")
+        if len(lines) != 1:
+            raise errors.MultipleImagesFoundException(
+                f"Expected a single line of output, got {len(lines)} lines"
+            )
+        image_digest = lines[0].replace("sha256:", "")
+        if not image_digest:
+            raise errors.ImageNotPresentException(
+                f"The image {image} does not exist locally"
+            )
+        return image_digest
+    except subprocess.CalledProcessError as e:
+        raise errors.ImageNotPresentException(
+            f"The image {image} does not exist locally"
+        )
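A short usage sketch for the new helpers above, assuming this branch's container_utils module and an image already present in the local runtime. Note the design choice documented in get_local_image_digest(): it shells out to "podman images --format '{{.Digest}}'" because "inspect" would return the digest of the architecture-bound image rather than the multi-arch manifest:

from dangerzone import container_utils

# Digest of the locally installed image (without the "sha256:" prefix).
digest = container_utils.get_local_image_digest(container_utils.CONTAINER_NAME)

# Re-pull that exact digest, forwarding the runtime's progress output line by
# line to any callable sink: print, a logger, or a Qt signal's emit.
container_utils.container_pull(
    container_utils.CONTAINER_NAME,
    manifest_digest=digest,
    callback=print,
)

# Give the pinned digest a friendly tag.
container_utils.tag_image_by_digest(digest, f"{container_utils.CONTAINER_NAME}:latest")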
(unnamed file)
@@ -122,25 +122,37 @@ def handle_document_errors(func: F) -> F:
 #### Container-related errors


-class ImageNotPresentException(Exception):
+class ContainerException(Exception):
     pass


-class ImageInstallationException(Exception):
+class ImageNotPresentException(ContainerException):
     pass


-class NoContainerTechException(Exception):
+class MultipleImagesFoundException(ContainerException):
+    pass
+
+
+class ImageInstallationException(ContainerException):
+    pass
+
+
+class NoContainerTechException(ContainerException):
     def __init__(self, container_tech: str) -> None:
         super().__init__(f"{container_tech} is not installed")


-class NotAvailableContainerTechException(Exception):
+class NotAvailableContainerTechException(ContainerException):
     def __init__(self, container_tech: str, error: str) -> None:
         self.error = error
         self.container_tech = container_tech
         super().__init__(f"{container_tech} is not available")


-class UnsupportedContainerRuntime(Exception):
+class UnsupportedContainerRuntime(ContainerException):
     pass
+
+
+class ContainerPullException(ContainerException):
+    pass
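Since every container-related error now derives from ContainerException, callers can catch the whole family with one handler and still special-case individual failures. A sketch under that assumption, not code from this diff:

import logging

from dangerzone import errors
from dangerzone.isolation_provider.base import IsolationProvider

log = logging.getLogger(__name__)

def install_or_report(provider: IsolationProvider) -> bool:
    try:
        return provider.install(should_upgrade=True)
    except errors.ContainerPullException as e:
        log.error(f"Could not pull the image; keeping the current one: {e}")
    except errors.ContainerException as e:
        # Umbrella for ImageNotPresentException, MultipleImagesFoundException,
        # ImageInstallationException, NoContainerTechException, and the rest.
        log.error(f"Container setup failed: {e}")
    return False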
(unnamed file)
@@ -24,6 +24,8 @@ from ..document import Document
 from ..isolation_provider.container import Container
 from ..isolation_provider.dummy import Dummy
 from ..isolation_provider.qubes import Qubes, is_qubes_native_conversion
+from ..updater import errors as updater_errors
+from ..updater import releases
 from ..util import get_resource_path, get_version
 from .logic import DangerzoneGui
 from .main_window import MainWindow
@@ -161,16 +163,15 @@ def gui_main(dummy_conversion: bool, filenames: Optional[List[str]]) -> bool:
         window.register_update_handler(updater.finished)

         log.debug("Consulting updater settings before checking for updates")
-        if updater.should_check_for_updates():
+        should_check = updater.should_check_for_updates()
+
+        if should_check:
             log.debug("Checking for updates")
             updater.start()
         else:
             log.debug("Will not check for updates, based on updater settings")

-        # Ensure the status of the toggle updates checkbox is updated, after the user is
-        # prompted to enable updates.
-        window.toggle_updates_action.setChecked(bool(updater.check))
+        window.toggle_updates_action.setChecked(should_check)

     if filenames:
         open_files(filenames)
(unnamed file)
@@ -1,3 +1,4 @@
+import io
 import logging
 import os
 import platform
@@ -5,30 +6,32 @@ import tempfile
 import typing
 from multiprocessing.pool import ThreadPool
 from pathlib import Path
-from typing import List, Optional
+from typing import Callable, List, Optional

 # FIXME: See https://github.com/freedomofpress/dangerzone/issues/320 for more details.
 if typing.TYPE_CHECKING:
     from PySide2 import QtCore, QtGui, QtSvg, QtWidgets
     from PySide2.QtCore import Qt
+    from PySide2.QtGui import QTextCursor
     from PySide2.QtWidgets import QAction, QTextEdit
 else:
     try:
         from PySide6 import QtCore, QtGui, QtSvg, QtWidgets
         from PySide6.QtCore import Qt
-        from PySide6.QtGui import QAction
+        from PySide6.QtGui import QAction, QTextCursor
         from PySide6.QtWidgets import QTextEdit
     except ImportError:
         from PySide2 import QtCore, QtGui, QtSvg, QtWidgets
         from PySide2.QtCore import Qt
+        from PySide2.QtGui import QTextCursor
         from PySide2.QtWidgets import QAction, QTextEdit

 from .. import errors
 from ..document import SAFE_EXTENSION, Document
 from ..isolation_provider.qubes import is_qubes_native_conversion
+from ..updater.releases import UpdateReport
 from ..util import format_exception, get_resource_path, get_version
 from .logic import Alert, CollapsibleBox, DangerzoneGui, UpdateDialog
-from .updater import UpdateReport

 log = logging.getLogger(__name__)

@@ -163,7 +166,7 @@ class MainWindow(QtWidgets.QMainWindow):
         self.toggle_updates_action.triggered.connect(self.toggle_updates_triggered)
         self.toggle_updates_action.setCheckable(True)
         self.toggle_updates_action.setChecked(
-            bool(self.dangerzone.settings.get("updater_check"))
+            bool(self.dangerzone.settings.get("updater_check_all"))
         )

         # Add the "Exit" action
@@ -281,7 +284,7 @@ class MainWindow(QtWidgets.QMainWindow):
     def toggle_updates_triggered(self) -> None:
         """Change the underlying update check settings based on the user's choice."""
         check = self.toggle_updates_action.isChecked()
-        self.dangerzone.settings.set("updater_check", check)
+        self.dangerzone.settings.set("updater_check_all", check)
         self.dangerzone.settings.save()

     def handle_docker_desktop_version_check(
@@ -436,15 +439,21 @@ class MainWindow(QtWidgets.QMainWindow):

 class InstallContainerThread(QtCore.QThread):
     finished = QtCore.Signal(str)
+    process_stdout = QtCore.Signal(str)

-    def __init__(self, dangerzone: DangerzoneGui) -> None:
+    def __init__(
+        self, dangerzone: DangerzoneGui, callback: Optional[Callable] = None
+    ) -> None:
         super(InstallContainerThread, self).__init__()
         self.dangerzone = dangerzone

     def run(self) -> None:
         error = None
         try:
-            installed = self.dangerzone.isolation_provider.install()
+            should_upgrade = bool(self.dangerzone.settings.get("updater_check_all"))
+            installed = self.dangerzone.isolation_provider.install(
+                should_upgrade=should_upgrade, callback=self.process_stdout.emit
+            )
         except Exception as e:
             log.error("Container installation problem")
             error = format_exception(e)
@@ -479,11 +488,20 @@ class TracebackWidget(QTextEdit):
         # Enable copying
         self.setTextInteractionFlags(Qt.TextSelectableByMouse)

+        self.current_output = ""
+
     def set_content(self, error: Optional[str] = None) -> None:
         if error:
             self.setPlainText(error)
             self.setVisible(True)

+    def process_output(self, line):
+        self.current_output += line
+        self.setText(self.current_output)
+        cursor = self.textCursor()
+        cursor.movePosition(QTextCursor.MoveOperation.End)
+        self.setTextCursor(cursor)
+

 class WaitingWidgetContainer(WaitingWidget):
     # These are the possible states that the WaitingWidget can show.
@@ -623,8 +641,14 @@ class WaitingWidgetContainer(WaitingWidget):
             "Installing the Dangerzone container image.<br><br>"
             "This might take a few minutes..."
         )
+        self.traceback.setVisible(True)

         self.install_container_t = InstallContainerThread(self.dangerzone)
         self.install_container_t.finished.connect(self.installation_finished)

+        self.install_container_t.process_stdout.connect(
+            self.traceback.process_output
+        )
+
         self.install_container_t.start()
(unnamed file)
@@ -1,15 +1,7 @@
-"""A module that contains the logic for checking for updates."""
-
-import json
 import logging
-import platform
-import sys
-import time
 import typing
 from typing import Optional

-from packaging import version
-
 if typing.TYPE_CHECKING:
     from PySide2 import QtCore, QtWidgets
 else:
@@ -18,36 +10,33 @@ else:
     except ImportError:
         from PySide2 import QtCore, QtWidgets

-# XXX implict import for "markdown" module required for Cx_Freeze to build on Windows
-# See https://github.com/freedomofpress/dangerzone/issues/501
-import html.parser  # noqa: F401
-
-import markdown
-import requests
-
-from ..util import get_version
+from ..updater import errors, releases
 from .logic import Alert, DangerzoneGui

 log = logging.getLogger(__name__)


 MSG_CONFIRM_UPDATE_CHECKS = """\
-<p><b>Do you want Dangerzone to automatically check for updates?</b></p>
+<p>
+    <b>Do you want Dangerzone to automatically check for updates and apply them?</b>
+</p>

-<p>If you accept, Dangerzone will check the
+<p>If you accept, Dangerzone will check for updates of the sandbox and apply them
+automatically. This will ensure that you always have the latest version of the sandbox,
+which is critical for the software to operate securely.</p>
+
+<p>Sandbox updates may include security patches and bug fixes, but won't include new features.</p>
+
+<p>Additionally, Dangerzone will check the
 <a href="https://github.com/freedomofpress/dangerzone/releases">latest releases page</a>
-in github.com on startup. Otherwise it will make no network requests and
-won't inform you about new releases.</p>
+in github.com, and inform you about new releases.
+
+Otherwise it will make no network requests and won't inform you about new releases.</p>

 <p>If you prefer another way of getting notified about new releases, we suggest adding
 to your RSS reader our
-<a href="https://fosstodon.org/@dangerzone.rss">Mastodon feed</a>. For more information
-about updates, check
-<a href="https://github.com/freedomofpress/dangerzone/wiki/Updates">this webpage</a>.</p>
+<a href="https://dangerzone.rocks/feed.xml">Dangerzone News feed</a>.</p>
 """

-UPDATE_CHECK_COOLDOWN_SECS = 60 * 60 * 12  # Check for updates at most every 12 hours.
-

 class UpdateCheckPrompt(Alert):
     """The prompt that asks the users if they want to enable update checks."""
@@ -55,7 +44,7 @@ class UpdateCheckPrompt(Alert):
     x_pressed = False

     def closeEvent(self, event: QtCore.QEvent) -> None:
-        """Detect when a user has pressed "X" in the title bar.
+        """Detect when a user has pressed "X" in the title bar (to close the dialog).

         This function is called when a user clicks on "X" in the title bar. We want to
         differentiate between the user clicking on "Cancel" and clicking on "X", since
@@ -76,72 +65,32 @@ class UpdateCheckPrompt(Alert):
         return buttons_layout


-class UpdateReport:
-    """A report for an update check."""
-
-    def __init__(
-        self,
-        version: Optional[str] = None,
-        changelog: Optional[str] = None,
-        error: Optional[str] = None,
-    ):
-        self.version = version
-        self.changelog = changelog
-        self.error = error
-
-    def empty(self) -> bool:
-        return self.version is None and self.changelog is None and self.error is None
-
-
 class UpdaterThread(QtCore.QThread):
     """Check asynchronously for Dangerzone updates.

-    The Updater class is mainly responsible for the following:
-
-    1. Asking the user if they want to enable update checks or not.
-    2. Determining when it's the right time to check for updates.
-    3. Hitting the GitHub releases API and learning about updates.
+    The Updater class is mainly responsible for
+    asking the user if they want to enable update checks or not.

     Since checking for updates is a task that may take some time, we perform it
-    asynchronously, in a Qt thread. This thread then triggers a signal, and informs
-    whoever has connected to it.
+    asynchronously, in a Qt thread.
+
+    When finished, this thread triggers a signal with the results.
     """

-    finished = QtCore.Signal(UpdateReport)
+    finished = QtCore.Signal(releases.UpdateReport)

-    GH_RELEASE_URL = (
-        "https://api.github.com/repos/freedomofpress/dangerzone/releases/latest"
-    )
-    REQ_TIMEOUT = 15
-
     def __init__(self, dangerzone: DangerzoneGui):
         super().__init__()
         self.dangerzone = dangerzone

-    ###########
-    # Helpers for updater settings
-    #
-    # These helpers make it easy to retrieve specific updater-related settings, as well
-    # as save the settings file, only when necessary.
-
-    @property
-    def check(self) -> Optional[bool]:
-        return self.dangerzone.settings.get("updater_check")
-
-    @check.setter
-    def check(self, val: bool) -> None:
-        self.dangerzone.settings.set("updater_check", val, autosave=True)
-
     def prompt_for_checks(self) -> Optional[bool]:
         """Ask the user if they want to be informed about Dangerzone updates."""
         log.debug("Prompting the user for update checks")
-        # FIXME: Handle the case where a user clicks on "X", instead of explicitly
-        # making a choice. We should probably ask them again on the next run.
         prompt = UpdateCheckPrompt(
             self.dangerzone,
             message=MSG_CONFIRM_UPDATE_CHECKS,
-            ok_text="Check Automatically",
-            cancel_text="Don't Check",
+            ok_text="Enable sandbox updates",
+            cancel_text="Do not make any requests",
         )
         check = prompt.launch()
         if not check and prompt.x_pressed:
@@ -149,167 +98,18 @@ class UpdaterThread(QtCore.QThread):
         return bool(check)

     def should_check_for_updates(self) -> bool:
-        """Determine if we can check for updates based on settings and user prefs.
-
-        Note that this method only checks if the user has expressed an interest for
-        learning about new updates, and not whether we should actually make an update
-        check. Those two things are distinct, actually. For example:
-
-        * A user may have expressed that they want to learn about new updates.
-        * A previous update check may have found out that there's a new version out.
-        * Thus we will always show to the user the cached info about the new version,
-          and won't make a new update check.
-        """
-        log.debug("Checking platform type")
-        # TODO: Disable updates for Homebrew installations.
-        if platform.system() == "Linux" and not getattr(sys, "dangerzone_dev", False):
-            log.debug("Running on Linux, disabling updates")
-            if not self.check:  # if not overidden by user
-                self.check = False
-            return False
-
-        log.debug("Checking if first run of Dangerzone")
-        if self.dangerzone.settings.get("updater_last_check") is None:
-            log.debug("Dangerzone is running for the first time, updates are stalled")
-            self.dangerzone.settings.set("updater_last_check", 0, autosave=True)
-            return False
-
-        log.debug("Checking if user has already expressed their preference")
-        if self.check is None:
-            log.debug("User has not been asked yet for update checks")
-            self.check = self.prompt_for_checks()
-            return bool(self.check)
-        elif not self.check:
-            log.debug("User has expressed that they don't want to check for updates")
-            return False
-
-        return True
-
-    def can_update(self, cur_version: str, latest_version: str) -> bool:
-        if version.parse(cur_version) == version.parse(latest_version):
-            return False
-        elif version.parse(cur_version) > version.parse(latest_version):
-            # FIXME: This is a sanity check, but we should improve its wording.
-            raise Exception("Received version is older than the latest version")
-        else:
-            return True
-
-    def _get_now_timestamp(self) -> int:
-        return int(time.time())
-
-    def _should_postpone_update_check(self) -> bool:
-        """Consult and update cooldown timer.
-
-        If the previous check happened before the cooldown period expires, do not check
-        again.
-        """
-        current_time = self._get_now_timestamp()
-        last_check = self.dangerzone.settings.get("updater_last_check")
-        if current_time < last_check + UPDATE_CHECK_COOLDOWN_SECS:
-            log.debug("Cooling down update checks")
-            return True
-        else:
-            return False
-
-    def get_latest_info(self) -> UpdateReport:
-        """Get the latest release info from GitHub.
-
-        Also, render the changelog from Markdown format to HTML, so that we can show it
-        to the users.
-        """
         try:
-            res = requests.get(self.GH_RELEASE_URL, timeout=self.REQ_TIMEOUT)
-        except Exception as e:
-            raise RuntimeError(
-                f"Encountered an exception while checking {self.GH_RELEASE_URL}: {e}"
-            )
-
-        if res.status_code != 200:
-            raise RuntimeError(
-                f"Encountered an HTTP {res.status_code} error while checking"
-                f" {self.GH_RELEASE_URL}"
-            )
-
-        try:
-            info = res.json()
-        except json.JSONDecodeError:
-            raise ValueError(f"Received a non-JSON response from {self.GH_RELEASE_URL}")
-
-        try:
-            version = info["tag_name"].lstrip("v")
-            changelog = markdown.markdown(info["body"])
-        except KeyError:
-            raise ValueError(
-                f"Missing required fields in JSON response from {self.GH_RELEASE_URL}"
-            )
-
-        return UpdateReport(version=version, changelog=changelog)
-
-    # XXX: This happens in parallel with other tasks. DO NOT alter global state!
-    def _check_for_updates(self) -> UpdateReport:
-        """Check for updates locally and remotely.
-
-        Check for updates in two places:
-
-        1. In our settings, in case we have cached the latest version/changelog from a
-           previous run.
-        2. In GitHub, by hitting the latest releases API.
-        """
-        log.debug("Checking for Dangerzone updates")
-        latest_version = self.dangerzone.settings.get("updater_latest_version")
-        if version.parse(get_version()) < version.parse(latest_version):
-            log.debug("Determined that there is an update due to cached results")
-            return UpdateReport(
-                version=latest_version,
-                changelog=self.dangerzone.settings.get("updater_latest_changelog"),
-            )
-
-        # If the previous check happened before the cooldown period expires, do not
-        # check again. Else, bump the last check timestamp, before making the actual
-        # check. This is to ensure that even failed update checks respect the cooldown
-        # period.
-        if self._should_postpone_update_check():
-            return UpdateReport()
-        else:
-            self.dangerzone.settings.set(
-                "updater_last_check", self._get_now_timestamp(), autosave=True
-            )
-
-        log.debug("Checking the latest GitHub release")
-        report = self.get_latest_info()
-        log.debug(f"Latest version in GitHub is {report.version}")
-        if report.version and self.can_update(latest_version, report.version):
-            log.debug(
-                f"Determined that there is an update due to a new GitHub version:"
-                f" {latest_version} < {report.version}"
-            )
-            return report
-
-        log.debug("No need to update")
-        return UpdateReport()
-
-    ##################
-    # Logic for running update checks asynchronously
-
-    def check_for_updates(self) -> UpdateReport:
-        """Check for updates and return a report with the findings:
-
-        There are three scenarios when we check for updates, and each scenario returns a
-        slightly different answer:
-
-        1. No new updates: Return an empty update report.
-        2. Updates are available: Return an update report with the latest version and
-           changelog, in HTML format.
-        3. Update check failed: Return an update report that holds just the error
-           message.
-        """
-        try:
-            res = self._check_for_updates()
-        except Exception as e:
-            log.exception("Encountered an error while checking for upgrades")
-            res = UpdateReport(error=str(e))
-
-        return res
+            should_check: Optional[bool] = releases.should_check_for_releases(
+                self.dangerzone.settings
+            )
+        except errors.NeedUserInput:
+            should_check = self.prompt_for_checks()
+            if should_check is not None:
+                self.dangerzone.settings.set(
+                    "updater_check_all", should_check, autosave=True
+                )
+        return bool(should_check)

     def run(self) -> None:
-        self.finished.emit(self.check_for_updates())
+        has_updates = releases.check_for_updates(self.dangerzone.settings)
+        self.finished.emit(has_updates)
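The release-check logic now lives in dangerzone.updater.releases; the Qt thread only prompts the user when the releases module signals that no preference is recorded. A sketch of the headless flow, using only names that appear in this diff (should_check_for_releases, errors.NeedUserInput, check_for_updates, UpdateReport); ask_user() and the report fields are assumptions, since the releases module itself is not shown in this capture:

from dangerzone.settings import Settings
from dangerzone.updater import errors as updater_errors
from dangerzone.updater import releases

def ask_user() -> bool:
    # Hypothetical stand-in for the GUI's UpdateCheckPrompt dialog.
    return True

settings = Settings()
try:
    should_check = releases.should_check_for_releases(settings)
except updater_errors.NeedUserInput:
    should_check = ask_user()
    settings.set("updater_check_all", should_check, autosave=True)

if should_check:
    report = releases.check_for_updates(settings)  # a releases.UpdateReport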
(unnamed file)
@@ -95,7 +95,7 @@ class IsolationProvider(ABC):
         return self.debug or getattr(sys, "dangerzone_dev", False)

     @abstractmethod
-    def install(self) -> bool:
+    def install(self, should_upgrade: bool, callback: Callable) -> bool:
         pass

     def convert(
(unnamed file)
@@ -3,11 +3,20 @@ import os
 import platform
 import shlex
 import subprocess
-from typing import List, Tuple
+import sys
+from typing import Callable, List, Optional, Tuple

 from .. import container_utils, errors
-from ..container_utils import Runtime
+from ..container_utils import CONTAINER_NAME, Runtime
 from ..document import Document
+from ..updater import (
+    DEFAULT_PUBKEY_LOCATION,
+    UpdaterError,
+    install_local_container_tar,
+    is_update_available,
+    upgrade_container_image,
+    verify_local_image,
+)
 from ..util import get_resource_path, get_subprocess_startupinfo
 from .base import IsolationProvider, terminate_process_group

@@ -94,42 +103,55 @@ class Container(IsolationProvider):
         return security_args

     @staticmethod
-    def install() -> bool:
-        """Install the container image tarball, or verify that it's already installed.
-
-        Perform the following actions:
-        1. Get the tags of any locally available images that match Dangerzone's image
-           name.
-        2. Get the expected image tag from the image-id.txt file.
-           - If this tag is present in the local images, then we can return.
-           - Else, prune the older container images and continue.
-        3. Load the image tarball and make sure it matches the expected tag.
+    def install(
+        should_upgrade: bool,
+        callback: Optional[Callable] = sys.stdout.write,
+        last_try: bool = False,
+    ) -> bool:
         """
-        old_tags = container_utils.list_image_tags()
-        expected_tag = container_utils.get_expected_tag()
+        Install a (local or remote) container image.

-        if expected_tag not in old_tags:
-            # Prune older container images.
-            log.info(
-                f"Could not find a Dangerzone container image with tag '{expected_tag}'"
-            )
-            for tag in old_tags:
-                tag = container_utils.CONTAINER_NAME + ":" + tag
-                container_utils.delete_image_tag(tag)
+        Use the local `container.tar` image if:
+
+        - No image is currently installed and `should_upgrade` is set to False
+        - No image is currently installed and no upgrades are available
+
+        Upgrade to the last remote container image if:
+
+        - An upgrade is available and `should_upgrade` is set to True
+        """
+        installed_tags = container_utils.list_image_tags()
+        if not should_upgrade:
+            log.debug("Skipping container upgrade check as requested by the settings")
+            if not installed_tags:
+                install_local_container_tar()
         else:
-            return True
+            update_available, image_digest = is_update_available(
+                CONTAINER_NAME,
+                DEFAULT_PUBKEY_LOCATION,
+            )
+            if update_available and image_digest:
+                log.debug("Upgrading container image to %s", image_digest)
+                upgrade_container_image(
+                    CONTAINER_NAME,
+                    image_digest,
+                    DEFAULT_PUBKEY_LOCATION,
+                    callback=callback,
+                )
+            else:
+                log.debug("No update available for the container.")
+                if not installed_tags:
+                    install_local_container_tar()

-        # Load the image tarball into the container runtime.
-        container_utils.load_image_tarball()
-
-        # Check that the container image has the expected image tag.
-        # See https://github.com/freedomofpress/dangerzone/issues/988 for an example
-        # where this was not the case.
-        new_tags = container_utils.list_image_tags()
-        if expected_tag not in new_tags:
-            raise errors.ImageNotPresentException(
-                f"Could not find expected tag '{expected_tag}' after loading the"
-                " container image tarball"
-            )
+        try:
+            verify_local_image(CONTAINER_NAME)
+        except UpdaterError:
+            # delete_image()
+            if last_try:
+                raise
+            log.debug("Container image not found, trying to install it.")
+            return Container.install(
+                should_upgrade=should_upgrade, callback=callback, last_try=True
+            )

         return True

@@ -214,6 +236,9 @@ class Container(IsolationProvider):
         name: str,
     ) -> subprocess.Popen:
         runtime = Runtime()

+        image_digest = container_utils.get_local_image_digest(CONTAINER_NAME)
+        verify_local_image(CONTAINER_NAME)
         security_args = self.get_runtime_security_args()
         debug_args = []
         if self.debug:
@@ -222,9 +247,7 @@ class Container(IsolationProvider):
         enable_stdin = ["-i"]
         set_name = ["--name", name]
         prevent_leakage_args = ["--rm"]
-        image_name = [
-            container_utils.CONTAINER_NAME + ":" + container_utils.get_expected_tag()
-        ]
+        image_name = [CONTAINER_NAME + "@sha256:" + image_digest]
         args = (
             ["run"]
             + security_args
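Taken together, install() is now a small decision procedure: prefer the bundled tarball when upgrades are disabled or unavailable, otherwise pull and verify the signed remote digest, and retry once via last_try if local verification fails. A usage sketch, assuming this branch's modules and a working container runtime:

from dangerzone.container_utils import CONTAINER_NAME
from dangerzone.isolation_provider.container import Container
from dangerzone.updater import DEFAULT_PUBKEY_LOCATION, is_update_available

# Probe the registry for a newer signed image without installing anything.
update_available, digest = is_update_available(CONTAINER_NAME, DEFAULT_PUBKEY_LOCATION)
if update_available:
    print(f"New signed image available: sha256:{digest}")

# Offline-style install: only the bundled container.tar is considered.
Container.install(should_upgrade=False)

# Upgrade-aware install: pull the newer signed image if one exists and
# stream the runtime's pull output line by line (here to stdout via print).
Container.install(should_upgrade=True, callback=print)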
(unnamed file)
@@ -36,7 +36,7 @@ class Dummy(IsolationProvider):
         )
         super().__init__()

-    def install(self) -> bool:
+    def install(self, *args, **kwargs) -> bool:
         return True

     @staticmethod
(unnamed file)
@@ -18,7 +18,7 @@ log = logging.getLogger(__name__)
 class Qubes(IsolationProvider):
     """Uses a disposable qube for performing the conversion"""

-    def install(self) -> bool:
+    def install(self, *args, **kwargs) -> bool:
         return True

     @staticmethod
(unnamed file)
@@ -1,6 +1,7 @@
 import json
 import logging
 import os
+import platform
 from pathlib import Path
 from typing import TYPE_CHECKING, Any, Dict

@@ -32,7 +33,7 @@ class Settings:
         "open": True,
         "open_app": None,
         "safe_extension": SAFE_EXTENSION,
-        "updater_check": None,
+        "updater_check_all": None,
         "updater_last_check": None,  # last check in UNIX epoch (secs since 1970)
         # FIXME: How to invalidate those if they change upstream?
         "updater_latest_version": get_version(),
dangerzone/updater/__init__.py (new file, 12 lines)
@@ -0,0 +1,12 @@
+import logging
+
+log = logging.getLogger(__name__)
+
+from .errors import SignatureError, UpdaterError
+from .signatures import (
+    DEFAULT_PUBKEY_LOCATION,
+    install_local_container_tar,
+    is_update_available,
+    upgrade_container_image,
+    verify_local_image,
+)
90
dangerzone/updater/attestations.py
Normal file
90
dangerzone/updater/attestations.py
Normal file
|
@ -0,0 +1,90 @@
import subprocess
from tempfile import NamedTemporaryFile

from . import cosign

# NOTE: You can grab the SLSA attestation for an image/tag pair with the following
# commands:
#
# IMAGE=ghcr.io/apyrgio/dangerzone/dangerzone
# TAG=20250129-0.8.0-149-gbf2f5ac
# DIGEST=$(crane digest ${IMAGE?}:${TAG?})
# ATT_MANIFEST=${IMAGE?}:${DIGEST/:/-}.att
# ATT_BLOB=${IMAGE?}@$(crane manifest ${ATT_MANIFEST?} | jq -r '.layers[0].digest')
# crane blob ${ATT_BLOB?} | jq -r '.payload' | base64 -d | jq
CUE_POLICY = r"""
// The predicateType field must match this string
predicateType: "https://slsa.dev/provenance/v0.2"

predicate: {{
  // This condition verifies that the builder is the builder we
  // expect and trust. The following condition can be used
  // unmodified. It verifies that the builder is the container
  // workflow.
  builder: {{
    id: =~"^https://github.com/slsa-framework/slsa-github-generator/.github/workflows/generator_container_slsa3.yml@refs/tags/v[0-9]+.[0-9]+.[0-9]+$"
  }}
  invocation: {{
    configSource: {{
      // This condition verifies the entrypoint of the workflow.
      // Replace with the relative path to your workflow in your
      // repository.
      entryPoint: "{workflow}"

      // This condition verifies that the image was generated from
      // the source repository we expect. Replace this with your
      // repository.
      uri: =~"^git\\+https://github.com/{repository}@refs/heads/{branch}"
      // Add a condition to check for a specific commit hash
      digest: {{
        sha1: "{commit}"
      }}
    }}
  }}
}}
"""


def verify(
    image_name: str,
    branch: str,
    commit: str,
    repository: str,
    workflow: str,
) -> bool:
    """
    Look up the image attestation to see if the image has been built
    on GitHub runners, and from a given repository.
    """
    cosign.ensure_installed()
    policy = CUE_POLICY.format(
        repository=repository, workflow=workflow, commit=commit, branch=branch
    )

    # Put the value in files and verify with cosign
    with NamedTemporaryFile(mode="w", suffix=".cue") as policy_f:
        policy_f.write(policy)
        policy_f.flush()

        # Call cosign with the temporary file paths
        cmd = [
            "cosign",
            "verify-attestation",
            "--type",
            "slsaprovenance",
            "--policy",
            policy_f.name,
            "--certificate-oidc-issuer",
            "https://token.actions.githubusercontent.com",
            "--certificate-identity-regexp",
            "^https://github.com/slsa-framework/slsa-github-generator/.github/workflows/generator_container_slsa3.yml@refs/tags/v[0-9]+.[0-9]+.[0-9]+$",
            image_name,
        ]

        result = subprocess.run(cmd, capture_output=True)
        if result.returncode != 0:
            error = result.stderr.decode()
            raise Exception(f"Attestation cannot be verified. {error}")
        return True
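# A minimal usage sketch of verify(), with a hypothetical commit hash (the tag
# comes from the NOTE above; the workflow/repository values mirror the CLI
# defaults defined in dangerzone/updater/cli.py):
#
#   verify(
#       "ghcr.io/freedomofpress/dangerzone/dangerzone:20250129-0.8.0-149-gbf2f5ac",
#       branch="main",
#       commit="<full-sha1-of-the-build-commit>",
#       repository="freedomofpress/dangerzone",
#       workflow=".github/workflows/release-container-image.yml",
#   )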
183  dangerzone/updater/cli.py  Normal file
@@ -0,0 +1,183 @@
#!/usr/bin/python

import functools
import logging

import click

from .. import container_utils
from ..container_utils import get_runtime_name
from . import attestations, errors, log, registry, signatures

DEFAULT_REPOSITORY = "freedomofpress/dangerzone"
DEFAULT_BRANCH = "main"
DEFAULT_IMAGE_NAME = "ghcr.io/freedomofpress/dangerzone/dangerzone"


@click.group()
@click.option("--debug", is_flag=True)
@click.option("--runtime", default=get_runtime_name())
def main(debug: bool, runtime: str) -> None:
    if debug:
        click.echo("Debug mode enabled")
        level = logging.DEBUG
    else:
        level = logging.INFO
    logging.basicConfig(level=level)

    if runtime != get_runtime_name():
        click.echo(f"Using container runtime: {runtime}")
        container_utils.RUNTIME_NAME = runtime


@main.command()
@click.argument("image", default=DEFAULT_IMAGE_NAME)
@click.option("--pubkey", default=signatures.DEFAULT_PUBKEY_LOCATION)
def upgrade(image: str, pubkey: str) -> None:
    """Upgrade the image to the latest signed version."""
    manifest_digest = registry.get_manifest_digest(image)

    try:
        callback = functools.partial(click.echo, nl=False)
        signatures.upgrade_container_image(image, manifest_digest, pubkey, callback)
        click.echo(f"✅ The local image {image} has been upgraded")
        click.echo(f"✅ The image has been signed with {pubkey}")
        click.echo("✅ Signatures have been verified and stored locally")

    except errors.ImageAlreadyUpToDate as e:
        click.echo(f"✅ {e}")
        raise click.Abort()
    except Exception as e:
        click.echo(f"❌ {e}")
        raise click.Abort()


@main.command()
@click.argument("image", default=DEFAULT_IMAGE_NAME)
@click.option("--pubkey", default=signatures.DEFAULT_PUBKEY_LOCATION)
def store_signatures(image: str, pubkey: str) -> None:
    manifest_digest = registry.get_manifest_digest(image)
    sigs = signatures.get_remote_signatures(image, manifest_digest)
    signatures.verify_signatures(sigs, manifest_digest, pubkey)
    signatures.store_signatures(sigs, manifest_digest, pubkey, update_logindex=False)
    click.echo("✅ Signatures have been verified and stored locally")


@main.command()
@click.argument("image_filename")
@click.option("--pubkey", default=signatures.DEFAULT_PUBKEY_LOCATION)
@click.option("--force", is_flag=True)
def load_archive(image_filename: str, pubkey: str, force: bool) -> None:
    """Upgrade the local image to the one in the archive."""
    try:
        loaded_image = signatures.upgrade_container_image_airgapped(
            image_filename, pubkey, bypass_logindex=force
        )
        click.echo(
            f"✅ Installed image {image_filename} on the system as {loaded_image}"
        )
    except errors.ImageAlreadyUpToDate as e:
        click.echo(f"✅ {e}")
    except errors.InvalidLogIndex:
        click.echo("❌ Trying to install an image older than the currently installed one")
        raise click.Abort()
    except Exception as e:
        click.echo(f"❌ {e}")
        raise click.Abort()


@main.command()
@click.argument("image")
@click.option("--output", default="dangerzone-airgapped.tar")
def prepare_archive(image: str, output: str) -> None:
    """Prepare an archive to upgrade the dangerzone image on an airgapped environment."""
    signatures.prepare_airgapped_archive(image, output)
    click.echo(f"✅ Archive {output} created")


@main.command()
@click.argument("image", default=DEFAULT_IMAGE_NAME)
@click.option("--pubkey", default=signatures.DEFAULT_PUBKEY_LOCATION)
def verify_local(image: str, pubkey: str) -> None:
    """
    Verify the local image signature against a public key and the stored signatures.
    """
    # XXX remove a potential :tag
    if signatures.verify_local_image(image, pubkey):
        click.echo(
            (
                f"Verifying the local image:\n\n"
                f"pubkey: {pubkey}\n"
                f"image: {image}\n\n"
                f"✅ The local image {image} has been signed with {pubkey}"
            )
        )


@main.command()
@click.argument("image")
def list_remote_tags(image: str) -> None:
    """List the tags available for a given image."""
    click.echo(f"Existing tags for {image}")
    for tag in registry.list_tags(image):
        click.echo(tag)


@main.command()
@click.argument("image")
def get_manifest(image: str) -> None:
    """Retrieves a remote manifest for a given image and displays it."""
    click.echo(registry.get_manifest(image).content)


@main.command()
@click.argument("image_name")
# XXX: Do we really want to check against this?
@click.option(
    "--branch",
    default=DEFAULT_BRANCH,
    help="The Git branch that the image was built from",
)
@click.option(
    "--commit",
    required=True,
    help="The Git commit the image was built from",
)
@click.option(
    "--repository",
    default=DEFAULT_REPOSITORY,
    help="The GitHub repository to check the attestation for",
)
@click.option(
    "--workflow",
    default=".github/workflows/release-container-image.yml",
    help="The path of the GitHub Actions workflow this image was created from",
)
def attest_provenance(
    image_name: str,
    branch: str,
    commit: str,
    repository: str,
    workflow: str,
) -> None:
    """
    Look up the image attestation to see if the image has been built
    on GitHub runners, and from a given repository.
    """
    # TODO: Parse image and make sure it has a tag. Might even check for a digest.
    # parsed = registry.parse_image_location(image)

    verified = attestations.verify(image_name, branch, commit, repository, workflow)
    if verified:
        click.echo(
            f"🎉 Successfully verified image '{image_name}' and its associated claims:"
        )
        click.echo("- ✅ SLSA Level 3 provenance")
        click.echo(f"- ✅ GitHub repo: {repository}")
        click.echo(f"- ✅ GitHub actions workflow: {workflow}")
        click.echo(f"- ✅ Git branch: {branch}")
        click.echo(f"- ✅ Git commit: {commit}")


if __name__ == "__main__":
    main()
32  dangerzone/updater/cosign.py  Normal file
@@ -0,0 +1,32 @@
import subprocess

from . import errors, log


def ensure_installed() -> None:
    try:
        subprocess.run(["cosign", "version"], capture_output=True, check=True)
    except (subprocess.CalledProcessError, FileNotFoundError):
        # Either the cosign binary failed to run, or it is not on the PATH at all.
        raise errors.CosignNotInstalledError()


def verify_local_image(oci_image_folder: str, pubkey: str) -> bool:
    """Verify the given path against the given public key"""

    ensure_installed()
    cmd = [
        "cosign",
        "verify",
        "--key",
        pubkey,
        "--offline",
        "--local-image",
        oci_image_folder,
    ]
    log.debug(" ".join(cmd))
    result = subprocess.run(cmd, capture_output=True)
    if result.returncode == 0:
        log.info("Signature verified")
        return True
    log.info("Failed to verify signature: %s", result.stderr)
    return False
64  dangerzone/updater/errors.py  Normal file
@@ -0,0 +1,64 @@
class UpdaterError(Exception):
    pass


class ImageAlreadyUpToDate(UpdaterError):
    pass


class ImageNotFound(UpdaterError):
    pass


class SignatureError(UpdaterError):
    pass


class RegistryError(UpdaterError):
    pass


class AirgappedImageDownloadError(UpdaterError):
    pass


class NoRemoteSignatures(SignatureError):
    pass


class SignatureVerificationError(SignatureError):
    pass


class SignatureExtractionError(SignatureError):
    pass


class SignaturesFolderDoesNotExist(SignatureError):
    pass


class InvalidSignatures(SignatureError):
    pass


class SignatureMismatch(SignatureError):
    pass


class LocalSignatureNotFound(SignatureError):
    pass


class CosignNotInstalledError(SignatureError):
    pass


class InvalidLogIndex(SignatureError):
    pass


class NeedUserInput(UpdaterError):
    """The user has not yet been prompted to know if they want to check for updates."""

    pass
139  dangerzone/updater/registry.py  Normal file
@@ -0,0 +1,139 @@
import re
from collections import namedtuple
from hashlib import sha256
from typing import Dict, Optional, Tuple

import requests

from .. import container_utils as runtime
from .. import errors as dzerrors
from . import errors, log

__all__ = [
    "get_manifest_digest",
    "list_tags",
    "get_manifest",
    "parse_image_location",
]

SIGSTORE_BUNDLE = "application/vnd.dev.sigstore.bundle.v0.3+json"
IMAGE_INDEX_MEDIA_TYPE = "application/vnd.oci.image.index.v1+json"
ACCEPT_MANIFESTS_HEADER = ",".join(
    [
        "application/vnd.docker.distribution.manifest.v1+json",
        "application/vnd.docker.distribution.manifest.v1+prettyjws",
        "application/vnd.docker.distribution.manifest.v2+json",
        "application/vnd.oci.image.manifest.v1+json",
        "application/vnd.docker.distribution.manifest.list.v2+json",
        IMAGE_INDEX_MEDIA_TYPE,
    ]
)


Image = namedtuple("Image", ["registry", "namespace", "image_name", "tag", "digest"])


def parse_image_location(input_string: str) -> Image:
    """Parses container image location into an Image namedtuple"""
    pattern = (
        r"^"
        r"(?P<registry>[a-zA-Z0-9.-]+)/"
        r"(?P<namespace>[a-zA-Z0-9-]+)/"
        r"(?P<image_name>[^:@]+)"
        r"(?::(?P<tag>[a-zA-Z0-9.-]+))?"
        r"(?:@(?P<digest>sha256:[a-zA-Z0-9]+))?"
        r"$"
    )
    match = re.match(pattern, input_string)
    if not match:
        raise ValueError("Malformed image location")
    return Image(
        registry=match.group("registry"),
        namespace=match.group("namespace"),
        image_name=match.group("image_name"),
        tag=match.group("tag") or "latest",
        digest=match.group("digest"),
    )
def _get_auth_header(image: Image) -> Dict[str, str]:
    auth_url = f"https://{image.registry}/token"
    response = requests.get(
        auth_url,
        params={
            "service": f"{image.registry}",
            "scope": f"repository:{image.namespace}/{image.image_name}:pull",
        },
    )
    response.raise_for_status()
    token = response.json()["token"]
    return {"Authorization": f"Bearer {token}"}


def _url(image: Image) -> str:
    return f"https://{image.registry}/v2/{image.namespace}/{image.image_name}"


def list_tags(image_str: str) -> list:
    image = parse_image_location(image_str)
    url = f"{_url(image)}/tags/list"
    response = requests.get(url, headers=_get_auth_header(image))
    response.raise_for_status()
    tags = response.json().get("tags", [])
    return tags


def get_manifest(image_str: str) -> requests.Response:
    """Get manifest information for a specific tag"""
    image = parse_image_location(image_str)
    manifest_url = f"{_url(image)}/manifests/{image.tag}"
    headers = {
        "Accept": ACCEPT_MANIFESTS_HEADER,
    }
    headers.update(_get_auth_header(image))

    response = requests.get(manifest_url, headers=headers)
    response.raise_for_status()
    return response


def list_manifests(image_str: str) -> list:
    return get_manifest(image_str).json().get("manifests")


def get_blob(image: Image, digest: str) -> requests.Response:
    response = requests.get(
        f"{_url(image)}/blobs/{digest}", headers=_get_auth_header(image)
    )
    response.raise_for_status()
    return response


def get_manifest_digest(
    image_str: str, tag_manifest_content: Optional[bytes] = None
) -> str:
    if not tag_manifest_content:
        tag_manifest_content = get_manifest(image_str).content

    return sha256(tag_manifest_content).hexdigest()


def is_new_remote_image_available(image_str: str) -> Tuple[bool, str]:
    """
    Check if a new remote image is available on the registry.
    """
    remote_digest = get_manifest_digest(image_str)
    image = parse_image_location(image_str)
    if image.digest:
        local_digest = image.digest
    else:
        try:
            local_digest = runtime.get_local_image_digest(image_str)
        except dzerrors.ImageNotPresentException:
            log.debug("No local image found")
            return True, remote_digest

    log.debug("Remote digest: %s", remote_digest)
    log.debug("Local digest: %s", local_digest)

    return (remote_digest != local_digest, remote_digest)
191  dangerzone/updater/releases.py  Normal file
@@ -0,0 +1,191 @@
import json
import platform
import sys
import time
from typing import Optional

import markdown
import requests
from packaging import version

from .. import util
from ..settings import Settings
from . import errors, log

# Check for updates at most every 12 hours.
UPDATE_CHECK_COOLDOWN_SECS = 60 * 60 * 12

GH_RELEASE_URL = (
    "https://api.github.com/repos/freedomofpress/dangerzone/releases/latest"
)
REQ_TIMEOUT = 15


class UpdateReport:
    """A report for an update check."""

    def __init__(
        self,
        version: Optional[str] = None,
        changelog: Optional[str] = None,
        error: Optional[str] = None,
    ):
        self.version = version
        self.changelog = changelog
        self.error = error

    def empty(self) -> bool:
        return self.version is None and self.changelog is None and self.error is None


def _get_now_timestamp() -> int:
    return int(time.time())


def _should_postpone_update_check(settings: Settings) -> bool:
    """Consult and update the cooldown timer.

    If the previous check happened before the cooldown period expires, do not check
    again.
    """
    current_time = _get_now_timestamp()
    last_check = settings.get("updater_last_check")
    if current_time < last_check + UPDATE_CHECK_COOLDOWN_SECS:
        log.debug("Cooling down update checks")
        return True
    else:
        return False


def ensure_sane_update(cur_version: str, latest_version: str) -> bool:
    if version.parse(cur_version) == version.parse(latest_version):
        return False
    elif version.parse(cur_version) > version.parse(latest_version):
        # FIXME: This is a sanity check, but we should improve its wording.
        raise Exception("Received version is older than the latest version")
    else:
        return True
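# Illustrative behavior, with hypothetical version strings:
#
#   ensure_sane_update("0.8.0", "0.9.0")  # -> True: upgrading is sane
#   ensure_sane_update("0.9.0", "0.9.0")  # -> False: already at the latest version
#   ensure_sane_update("0.9.1", "0.9.0")  # raises: the received version is older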
def fetch_release_info() -> UpdateReport:
    """Get the latest release info from GitHub.

    Also, render the changelog from Markdown format to HTML, so that we can show it
    to the users.
    """
    try:
        res = requests.get(GH_RELEASE_URL, timeout=REQ_TIMEOUT)
    except Exception as e:
        raise RuntimeError(
            f"Encountered an exception while checking {GH_RELEASE_URL}: {e}"
        )

    if res.status_code != 200:
        raise RuntimeError(
            f"Encountered an HTTP {res.status_code} error while checking"
            f" {GH_RELEASE_URL}"
        )

    try:
        info = res.json()
    except json.JSONDecodeError:
        raise ValueError(f"Received a non-JSON response from {GH_RELEASE_URL}")

    try:
        version = info["tag_name"].lstrip("v")
        changelog = markdown.markdown(info["body"])
    except KeyError:
        raise ValueError(
            f"Missing required fields in JSON response from {GH_RELEASE_URL}"
        )

    return UpdateReport(version=version, changelog=changelog)


def should_check_for_releases(settings: Settings) -> bool:
    """Determine if we can check for release updates based on settings and user prefs.

    Note that this method only checks if the user has expressed an interest in
    learning about new updates, and not whether we should actually make an update
    check. Those are two distinct things. For example:

    * A user may have expressed that they want to learn about new updates.
    * A previous update check may have found out that there's a new version out.
    * Thus we will always show to the user the cached info about the new version,
      and won't make a new update check.
    """
    check = settings.get("updater_check_all")

    log.debug("Checking platform type")
    # TODO: Disable updates for Homebrew installations.
    if platform.system() == "Linux" and not getattr(sys, "dangerzone_dev", False):
        log.debug("Running on Linux, disabling updates")
        if not check:  # if not overridden by user
            settings.set("updater_check_all", False, autosave=True)
            return False

    log.debug("Checking if first run of Dangerzone")
    if settings.get("updater_last_check") is None:
        log.debug("Dangerzone is running for the first time, updates are stalled")
        settings.set("updater_last_check", 0, autosave=True)
        return False

    log.debug("Checking if user has already expressed their preference")
    if check is None:
        log.debug("User has not been asked yet for update checks")
        raise errors.NeedUserInput()
    elif not check:
        log.debug("User has expressed that they don't want to check for updates")
        return False

    return True


def check_for_updates(settings: Settings) -> UpdateReport:
    """Check for updates locally and remotely.

    Check for updates (locally and remotely) and return a report with the findings:

    There are three scenarios when we check for updates, and each scenario returns a
    slightly different answer:

    1. No new updates: Return an empty update report.
    2. Updates are available: Return an update report with the latest version and
       changelog, in HTML format.
    3. Update check failed: Return an update report that holds just the error
       message.
    """
    try:
        log.debug("Checking for Dangerzone updates")
        latest_version = settings.get("updater_latest_version")
        if version.parse(util.get_version()) < version.parse(latest_version):
            log.debug("Determined that there is an update due to cached results")
            return UpdateReport(
                version=latest_version,
                changelog=settings.get("updater_latest_changelog"),
            )

        # If the previous check happened before the cooldown period expires, do not
        # check again. Else, bump the last check timestamp, before making the actual
        # check. This is to ensure that even failed update checks respect the cooldown
        # period.
        if _should_postpone_update_check(settings):
            return UpdateReport()
        else:
            settings.set("updater_last_check", _get_now_timestamp(), autosave=True)

        log.debug("Checking the latest GitHub release")
        report = fetch_release_info()
        log.debug(f"Latest version in GitHub is {report.version}")
        if report.version and ensure_sane_update(latest_version, report.version):
            log.debug(
                f"Determined that there is an update due to a new GitHub version:"
                f" {latest_version} < {report.version}"
            )
            return report

        log.debug("No need to update")
        return UpdateReport()
    except Exception as e:
        log.exception("Encountered an error while checking for upgrades")
        return UpdateReport(error=str(e))
518  dangerzone/updater/signatures.py  Normal file
@@ -0,0 +1,518 @@
import json
import platform
import re
import subprocess
import tarfile
from base64 import b64decode, b64encode
from functools import reduce
from hashlib import sha256
from io import BytesIO
from pathlib import Path
from tempfile import NamedTemporaryFile, TemporaryDirectory
from typing import Callable, Dict, List, Optional, Tuple

from .. import container_utils as runtime
from .. import errors as dzerrors
from ..util import get_resource_path
from . import cosign, errors, log, registry

try:
    import platformdirs
except ImportError:
    import appdirs as platformdirs  # type: ignore[no-redef]


def appdata_dir() -> Path:
    return Path(platformdirs.user_data_dir("dangerzone"))


# RELEASE: Bump this value to the log index of the latest signature,
# to ensure the software can't upgrade to container images that predate it.
DEFAULT_LOG_INDEX = 0

# XXX Store this somewhere else.
DEFAULT_PUBKEY_LOCATION = get_resource_path("freedomofpress-dangerzone-pub.key")
SIGNATURES_PATH = appdata_dir() / "signatures"
LAST_LOG_INDEX = SIGNATURES_PATH / "last_log_index"

__all__ = [
    "verify_signature",
    "load_and_verify_signatures",
    "store_signatures",
    "verify_offline_image_signature",
]


def signature_to_bundle(sig: Dict) -> Dict:
    """Convert a cosign-download signature to the format expected by cosign bundle."""
    bundle = sig["Bundle"]
    payload = bundle["Payload"]
    return {
        "base64Signature": sig["Base64Signature"],
        "Payload": sig["Payload"],
        "cert": sig["Cert"],
        "chain": sig["Chain"],
        "rekorBundle": {
            "SignedEntryTimestamp": bundle["SignedEntryTimestamp"],
            "Payload": {
                "body": payload["body"],
                "integratedTime": payload["integratedTime"],
                "logIndex": payload["logIndex"],
                "logID": payload["logID"],
            },
        },
        "RFC3161Timestamp": sig["RFC3161Timestamp"],
    }


def verify_signature(signature: dict, image_digest: str, pubkey: str | Path) -> None:
    """
    Verifies that:

    - the signature has been signed by the given public key
    - the signature matches the given image digest
    """
    # XXX - Also verify the identity/docker-reference field against the expected value
    # e.g. ghcr.io/freedomofpress/dangerzone/dangerzone

    cosign.ensure_installed()
    signature_bundle = signature_to_bundle(signature)
    try:
        payload_bytes = b64decode(signature_bundle["Payload"])
        payload_digest = json.loads(payload_bytes)["critical"]["image"][
            "docker-manifest-digest"
        ]
    except Exception as e:
        raise errors.SignatureVerificationError(
            f"Unable to extract the payload digest from the signature: {e}"
        )
    if payload_digest != f"sha256:{image_digest}":
        raise errors.SignatureMismatch(
            "The given signature does not match the expected image digest "
            f"({payload_digest}, {image_digest})"
        )

    with (
        NamedTemporaryFile(mode="w") as signature_file,
        NamedTemporaryFile(mode="bw") as payload_file,
    ):
        json.dump(signature_bundle, signature_file)
        signature_file.flush()

        payload_file.write(payload_bytes)
        payload_file.flush()

        if isinstance(pubkey, str):
            pubkey = Path(pubkey)

        cmd = [
            "cosign",
            "verify-blob",
            "--key",
            str(pubkey.absolute()),
            "--bundle",
            signature_file.name,
            payload_file.name,
        ]
        log.debug(" ".join(cmd))
        result = subprocess.run(cmd, capture_output=True)
        if result.returncode != 0 or result.stderr != b"Verified OK\n":
            log.debug("Failed to verify signature: %s", result.stderr)
            raise errors.SignatureVerificationError("Failed to verify signature")
        log.debug("Signature verified")


class Signature:
    def __init__(self, signature: Dict):
        self.signature = signature

    @property
    def payload(self) -> Dict:
        return json.loads(b64decode(self.signature["Payload"]))

    @property
    def manifest_digest(self) -> str:
        full_digest = self.payload["critical"]["image"]["docker-manifest-digest"]
        return full_digest.replace("sha256:", "")


def is_update_available(image_str: str, pubkey: str) -> Tuple[bool, Optional[str]]:
    """
    Check if a new image is available, doing all the necessary checks ensuring it
    would be safe to upgrade.
    """
    new_image_available, remote_digest = registry.is_new_remote_image_available(
        image_str
    )
    if not new_image_available:
        return False, None

    try:
        check_signatures_and_logindex(image_str, remote_digest, pubkey)
        return True, remote_digest
    except errors.InvalidLogIndex:
        return False, None


def check_signatures_and_logindex(
    image_str: str, remote_digest: str, pubkey: str
) -> list[Dict]:
    signatures = get_remote_signatures(image_str, remote_digest)
    verify_signatures(signatures, remote_digest, pubkey)

    incoming_log_index = get_log_index_from_signatures(signatures)
    last_log_index = get_last_log_index()

    if incoming_log_index < last_log_index:
        raise errors.InvalidLogIndex(
            f"The incoming log index ({incoming_log_index}) is "
            f"lower than the last known log index ({last_log_index})"
        )
    return signatures


def verify_signatures(
    signatures: List[Dict],
    image_digest: str,
    pubkey: str,
) -> bool:
    if len(signatures) < 1:
        raise errors.SignatureVerificationError("No signatures found")

    for signature in signatures:
        verify_signature(signature, image_digest, pubkey)

    return True


def get_last_log_index() -> int:
    SIGNATURES_PATH.mkdir(parents=True, exist_ok=True)
    if not LAST_LOG_INDEX.exists():
        return DEFAULT_LOG_INDEX

    with open(LAST_LOG_INDEX) as f:
        return int(f.read())


def get_log_index_from_signatures(signatures: List[Dict]) -> int:
    def _reducer(accumulator: int, signature: Dict) -> int:
        try:
            logIndex = int(signature["Bundle"]["Payload"]["logIndex"])
        except (KeyError, ValueError):
            return accumulator
        return max(accumulator, logIndex)

    return reduce(_reducer, signatures, 0)
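# Sketch: for signatures whose Rekor bundles carry logIndex values 5, 12, and 9,
# get_log_index_from_signatures() returns 12; entries without a parseable
# logIndex simply leave the accumulator unchanged.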
def write_log_index(log_index: int) -> None:
    last_log_index_path = SIGNATURES_PATH / "last_log_index"

    with open(last_log_index_path, "w") as f:
        f.write(str(log_index))


def _get_blob(tmpdir: str, digest: str) -> Path:
    return Path(tmpdir) / "blobs" / "sha256" / digest.replace("sha256:", "")


def upgrade_container_image_airgapped(
    container_tar: str, pubkey: str, bypass_logindex: bool = False
) -> str:
    """
    Verify the given archive against its self-contained signatures, then
    upgrade the image and retag it to the expected tag.

    Right now, the archive is extracted and reconstructed, requiring some space
    on the filesystem.

    :return: The loaded image name
    """

    # XXX Use a memory buffer instead of the filesystem
    with TemporaryDirectory() as tmpdir:

        def _get_signature_filename(manifests: List[Dict]) -> Path:
            for manifest in manifests:
                if (
                    manifest["annotations"].get("kind")
                    == "dev.cosignproject.cosign/sigs"
                ):
                    return _get_blob(tmpdir, manifest["digest"])
            raise errors.SignatureExtractionError()

        with tarfile.open(container_tar, "r") as archive:
            archive.extractall(tmpdir)

        if not cosign.verify_local_image(tmpdir, pubkey):
            raise errors.SignatureVerificationError()

        # Remove the signatures from the archive, otherwise podman is not able to load it
        with open(Path(tmpdir) / "index.json") as f:
            index_json = json.load(f)

        signature_filename = _get_signature_filename(index_json["manifests"])

        index_json["manifests"] = [
            manifest
            for manifest in index_json["manifests"]
            if manifest["annotations"].get("kind")
            in ("dev.cosignproject.cosign/imageIndex", "dev.cosignproject.cosign/image")
        ]

        with open(signature_filename, "r") as f:
            image_name, signatures = convert_oci_images_signatures(json.load(f), tmpdir)
        log.info(f"Found image name: {image_name}")

        if not bypass_logindex:
            # Ensure that we only upgrade if the log index is higher than the last known one
            incoming_log_index = get_log_index_from_signatures(signatures)
            last_log_index = get_last_log_index()

            if incoming_log_index < last_log_index:
                raise errors.InvalidLogIndex(
                    "The log index is not higher than the last known one"
                )

        image_digest = index_json["manifests"][0].get("digest").replace("sha256:", "")

        # Write the new index.json to the temp folder
        with open(Path(tmpdir) / "index.json", "w") as f:
            json.dump(index_json, f)

        with NamedTemporaryFile(suffix=".tar") as temporary_tar:
            with tarfile.open(temporary_tar.name, "w") as archive:
                # The root is the tmpdir
                archive.add(Path(tmpdir) / "index.json", arcname="index.json")
                archive.add(Path(tmpdir) / "oci-layout", arcname="oci-layout")
                archive.add(Path(tmpdir) / "blobs", arcname="blobs")

            runtime.load_image_tarball_from_tar(temporary_tar.name)
            runtime.tag_image_by_digest(image_digest, image_name)

        store_signatures(signatures, image_digest, pubkey)
        return image_name
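# The overall airgapped flow, as a sketch (hypothetical archive name and digest;
# prepare_airgapped_archive() is defined further below in this module):
#
#   prepare_airgapped_archive(
#       "ghcr.io/freedomofpress/dangerzone/dangerzone@sha256:<digest>", "dz.tar"
#   )
#   # ...transfer dz.tar to the airgapped machine, then there:
#   upgrade_container_image_airgapped("dz.tar", DEFAULT_PUBKEY_LOCATION)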
def convert_oci_images_signatures(
    signatures_manifest: Dict, tmpdir: str
) -> Tuple[str, List[Dict]]:
    def _to_cosign_signature(layer: Dict) -> Dict:
        signature = layer["annotations"]["dev.cosignproject.cosign/signature"]
        bundle = json.loads(layer["annotations"]["dev.sigstore.cosign/bundle"])
        payload_body = json.loads(b64decode(bundle["Payload"]["body"]))

        payload_location = _get_blob(tmpdir, layer["digest"])
        with open(payload_location, "rb") as f:
            payload_b64 = b64encode(f.read()).decode()

        return {
            "Base64Signature": payload_body["spec"]["signature"]["content"],
            "Payload": payload_b64,
            "Cert": None,
            "Chain": None,
            "Bundle": bundle,
            "RFC3161Timestamp": None,
        }

    layers = signatures_manifest.get("layers", [])
    signatures = [_to_cosign_signature(layer) for layer in layers]

    if not signatures:
        raise errors.SignatureExtractionError()

    payload_location = _get_blob(tmpdir, layers[0]["digest"])
    with open(payload_location, "r") as f:
        payload = json.load(f)
        image_name = payload["critical"]["identity"]["docker-reference"]

    return image_name, signatures


def get_file_digest(file: Optional[str] = None, content: Optional[bytes] = None) -> str:
    """Get the sha256 digest of a file or content"""
    if not file and not content:
        raise errors.UpdaterError("No file or content provided")
    if file:
        with open(file, "rb") as f:
            content = f.read()
    if content:
        return sha256(content).hexdigest()
    return ""


def load_and_verify_signatures(
    image_digest: str,
    pubkey: str,
    bypass_verification: bool = False,
    signatures_path: Optional[Path] = None,
) -> List[Dict]:
    """
    Load signatures from the local filesystem

    See store_signatures() for the expected format.
    """
    if not signatures_path:
        signatures_path = SIGNATURES_PATH

    pubkey_signatures = signatures_path / get_file_digest(pubkey)
    if not pubkey_signatures.exists():
        msg = (
            f"Cannot find a '{pubkey_signatures}' folder. "
            "You might need to download the image signatures first."
        )
        raise errors.SignaturesFolderDoesNotExist(msg)

    signatures_file = pubkey_signatures / f"{image_digest}.json"

    if not signatures_file.exists():
        msg = (
            f"Cannot find a '{signatures_file}' file. "
            "You might need to download the image signatures first."
        )
        raise errors.LocalSignatureNotFound(msg)

    with open(signatures_file) as f:
        log.debug("Loading signatures from %s", f.name)
        signatures = json.load(f)

    if not bypass_verification:
        verify_signatures(signatures, image_digest, pubkey)

    return signatures


def store_signatures(
    signatures: list[Dict], image_digest: str, pubkey: str, update_logindex: bool = True
) -> None:
    """
    Store signatures locally in the SIGNATURE_PATH folder, like this:

    ~/.config/dangerzone/signatures/
    ├── <pubkey-digest>
    │   ├── <image-digest>.json
    │   ├── <image-digest>.json
    └── last_log_index

    The last_log_index file is used to keep track of the last log index
    processed by the updater.

    The format used in the `.json` file is the one of `cosign download
    signature`, which differs from the "bundle" one used afterwards.

    It can be converted to the one expected by cosign verify --bundle with
    the `signature_to_bundle()` function.

    This function must be used only if the provided signatures have been verified.
    """

    def _get_digest(sig: Dict) -> str:
        payload = json.loads(b64decode(sig["Payload"]))
        return payload["critical"]["image"]["docker-manifest-digest"]

    # All the signatures should share the same digest.
    digests = list(map(_get_digest, signatures))
    if len(set(digests)) != 1:
        raise errors.InvalidSignatures("Signatures do not share the same image digest")

    if f"sha256:{image_digest}" != digests[0]:
        raise errors.SignatureMismatch(
            f"Signatures do not match the given image digest (sha256:{image_digest}, {digests[0]})"
        )

    pubkey_signatures = SIGNATURES_PATH / get_file_digest(pubkey)
    pubkey_signatures.mkdir(parents=True, exist_ok=True)

    with open(pubkey_signatures / f"{image_digest}.json", "w") as f:
        log.info(
            f"Storing signatures for {image_digest} in {pubkey_signatures}/{image_digest}.json"
        )
        json.dump(signatures, f)

    if update_logindex:
        write_log_index(get_log_index_from_signatures(signatures))


def verify_local_image(image: str, pubkey: str = DEFAULT_PUBKEY_LOCATION) -> bool:
    """
    Verifies that a local image has a valid signature
    """
    log.info(f"Verifying local image {image} against pubkey {pubkey}")
    try:
        image_digest = runtime.get_local_image_digest(image)
    except subprocess.CalledProcessError:
        raise errors.ImageNotFound(f"The image {image} does not exist locally")

    log.debug(f"Image digest: {image_digest}")
    load_and_verify_signatures(image_digest, pubkey)
    return True


def get_remote_signatures(image: str, digest: str) -> List[Dict]:
    """Retrieve the signatures from the registry, via `cosign download signature`."""
    cosign.ensure_installed()

    try:
        process = subprocess.run(
            ["cosign", "download", "signature", f"{image}@sha256:{digest}"],
            capture_output=True,
            check=True,
        )
    except subprocess.CalledProcessError as e:
        raise errors.NoRemoteSignatures(e)

    # Remove the last return, split on newlines, convert from JSON
    signatures_raw = process.stdout.decode("utf-8").strip().split("\n")
    signatures = list(filter(bool, map(json.loads, signatures_raw)))
    if len(signatures) < 1:
        raise errors.NoRemoteSignatures("No signatures found for the image")
    return signatures


def prepare_airgapped_archive(image_name: str, destination: str) -> None:
    if "@sha256:" not in image_name:
        raise errors.AirgappedImageDownloadError(
            "The image name must include a digest, e.g. ghcr.io/freedomofpress/dangerzone/dangerzone@sha256:123456"
        )

    cosign.ensure_installed()

    # Get the image from the registry
    with TemporaryDirectory() as tmpdir:
        msg = f"Downloading image {image_name}.\nIt might take a while."
        log.info(msg)

        try:
            subprocess.run(
                ["cosign", "save", image_name, "--dir", tmpdir],
                capture_output=True,
                check=True,
            )
        except subprocess.CalledProcessError:
            # With check=True, a non-zero exit raises, so surface it as our own error.
            raise errors.AirgappedImageDownloadError()

        with tarfile.open(destination, "w") as archive:
            archive.add(tmpdir, arcname=".")


def upgrade_container_image(
    image: str, manifest_digest: str, pubkey: str, callback: Optional[Callable] = None
) -> str:
    """Verify and upgrade the image to the latest, if signed."""
    update_available, remote_digest = registry.is_new_remote_image_available(image)
    if not update_available:
        raise errors.ImageAlreadyUpToDate("The image is already up to date")

    signatures = check_signatures_and_logindex(image, remote_digest, pubkey)
    runtime.container_pull(image, manifest_digest, callback=callback)

    # Store the signatures just now to avoid storing them unverified
    store_signatures(signatures, manifest_digest, pubkey)
    return manifest_digest


def install_local_container_tar(
    pubkey: Optional[str] = DEFAULT_PUBKEY_LOCATION,
) -> None:
    tarball_path = get_resource_path("container.tar")
    log.debug("Installing container image %s", tarball_path)
    upgrade_container_image_airgapped(tarball_path, pubkey)
@@ -69,6 +69,7 @@ def get_tessdata_dir() -> Path:


 def get_version() -> str:
+    """Returns the Dangerzone version string."""
     try:
         with get_resource_path("version.txt").open() as f:
             version = f.read().strip()
13  dev_scripts/dangerzone-image  Executable file
@@ -0,0 +1,13 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import os
import sys

# Load dangerzone module and resources from the source code tree
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.dangerzone_dev = True

from dangerzone.updater import cli

cli.main()
@@ -156,7 +156,7 @@ def parse_buildkit_args(args, runtime: str) -> str:
         return []

     if runtime != "podman":
-        raise RuntimeError("Cannot specify BuildKit arguments using the Podman runtime")
+        raise RuntimeError("Cannot specify BuildKit arguments using the Docker runtime")

     return shlex.split(args.buildkit_args)
83  docs/developer/independent-container-updates.md  Normal file
@@ -0,0 +1,83 @@
# Independent Container Updates

Since version 0.9.0, Dangerzone is able to ship container images independently
from releases of the software.

One of the main benefits of doing so is to shorten the time needed to distribute security fixes for the containers. Since the container is where the actual conversion of documents happens, this is a way to keep Dangerzone users secure.

If you are a Dangerzone user, this all happens behind the scenes, and you should not have to know anything about it to enjoy these "in-app" updates. If you are using Dangerzone in an air-gapped environment, check the sections below.

## Checking attestations

Each night, new images are built and pushed to the container registry, along
with a provenance attestation, enabling anybody to ensure that the image has
been originally built by GitHub CI runners, from a defined source repository (in our case `freedomofpress/dangerzone`).

To verify the attestations against our expectations, use the following command:
```bash
dangerzone-image attest-provenance ghcr.io/freedomofpress/dangerzone/dangerzone --repository freedomofpress/dangerzone
```
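Note that the CLI defines `--commit` as a required option, so in practice the invocation also needs the Git commit the image was built from (hypothetical placeholder below):

```bash
dangerzone-image attest-provenance ghcr.io/freedomofpress/dangerzone/dangerzone \
    --repository freedomofpress/dangerzone \
    --commit <commit-hash>
```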
In case of success, it will report back:

```
🎉 Successfully verified image
'ghcr.io/freedomofpress/dangerzone/dangerzone:<tag>@sha256:<digest>'
and its associated claims:
- ✅ SLSA Level 3 provenance
- ✅ GitHub repo: freedomofpress/dangerzone
- ✅ GitHub actions workflow: <workflow>
- ✅ Git branch: <branch>
- ✅ Git commit: <commit>
```

## Sign and publish the remote image

Once the image has been reproduced locally, we can add a signature to the container registry,
and update the `latest` tag to point to the proper hash.

```bash
cosign sign --sk ghcr.io/freedomofpress/dangerzone/dangerzone:${TAG}@sha256:${DIGEST}

# And bump the latest tag
crane auth login ghcr.io -u USERNAME --password $(cat pat_token)
crane tag ghcr.io/freedomofpress/dangerzone/dangerzone@sha256:${DIGEST} latest
```

## Install updates

To check if a new container image has been released, and update your local installation with it, you can use the following commands:

```bash
dangerzone-image upgrade ghcr.io/freedomofpress/dangerzone/dangerzone
```

## Verify locally

You can verify that the image you have locally matches the stored signatures, and that these have been signed with a trusted public key:

```bash
dangerzone-image verify-local ghcr.io/freedomofpress/dangerzone/dangerzone
```

## Installing image updates to air-gapped environments

Three steps are required:

1. Prepare the archive
2. Transfer the archive to the air-gapped system
3. Install the archive on the air-gapped system

This archive will contain all the needed material to validate that the new container image has been signed and is valid.

On the machine on which you prepare the packages:

```bash
dangerzone-image prepare-archive --output dz-fa94872.tar ghcr.io/freedomofpress/dangerzone/dangerzone@sha256:<digest>
```

On the airgapped machine, copy the file and run the following command:

```bash
dangerzone-image load-archive dz-fa94872.tar
```
@@ -11,8 +11,7 @@ https://github.com/freedomofpress/dangerzone/wiki/Updates

 ## Design overview

-This feature introduces a hamburger icon that will be visible across almost all
-of the Dangerzone windows. This will be used to notify the users about updates.
+A hamburger icon is visible across almost all of the Dangerzone windows, and is used to notify the users when there are new releases.

 ### First run

@@ -21,8 +20,7 @@ _We detect it's the first time Dangerzone runs because the

 Add the following keys in our `settings.json` file.

-* `"updater_check": None`: Whether to check for updates or not. `None` means
-  that the user has not decided yet, and is the default.
+* `"updater_check_all": True`: Whether or not to check and apply independent container updates and check for new releases.
 * `"updater_last_check": None`: The last time we checked for updates (in seconds
   from Unix epoch). None means that we haven't checked yet.
 * `"updater_latest_version": "0.4.2"`: The latest version that the Dangerzone

@@ -32,43 +30,19 @@ Add the following keys in our `settings.json` file.
 * `"updater_errors: 0`: The number of update check errors that we have
   encountered in a row.

+Note:
+Previously, `"updater_check"` was used to determine if we should check for new releases, and has been replaced by `"updater_check_all"` when adding support for independent container updates.

-* If on Linux, make `"updater_check": False`, since we normally have
-  other update channels for these platforms.

 ### Second run

 _We detect it's the second time Dangerzone runs because
-`settings["updater_check"] is not None and settings["updater_last_check"] is
+`settings["updater_check_all"] is not None and settings["updater_last_check"] is
 None`._

-Before starting up the main window, show this window:
+Before starting up the main window, the user is prompted if they want to enable update checks.

-* Title: Dangerzone Updater
-* Body:
-
-  > Do you want Dangerzone to automatically check for updates?
-  >
-  > If you accept, Dangerzone will check the latest releases page in github.com
-  > on startup. Otherwise it will make no network requests and won't inform you
-  > about new releases.
-  >
-  > If you prefer another way of getting notified about new releases, we suggest adding
-  > to your RSS reader our [Mastodon feed](https://fosstodon.org/@dangerzone.rss). For more information
-  > about updates, check [this webpage](https://github.com/freedomofpress/dangerzone/wiki/Updates).
-
-* Buttons:
-  - Check Automaticaly: Store `settings["updater_check"] = True`
-  - Don't Check: Store `settings["updater_check"] = False`
-
-Note:
-* Users will be able to change their choice from the hamburger menu, which will
-  contain an entry called "Check for updates", that users can check and uncheck.

 ### Subsequent runs

-_We perform the following only if `settings["updater_check"] == True`._
+_We perform the following only if `settings["updater_check_all"] == True`._

 1. Spawn a new thread so that we don't block the main window.
 2. Check if we have cached information about a release (version and changelog).
@@ -6,7 +6,7 @@ import sys
 from pathlib import Path

 BUILD_CONTEXT = "dangerzone"
-IMAGE_NAME = "dangerzone.rocks/dangerzone"
+IMAGE_NAME = "ghcr.io/freedomofpress/dangerzone/dangerzone"
 if platform.system() in ["Darwin", "Windows"]:
     CONTAINER_RUNTIME = "docker"
 elif platform.system() == "Linux":
@@ -27,6 +27,7 @@ packaging = "*"
 [tool.poetry.scripts]
 dangerzone = 'dangerzone:main'
 dangerzone-cli = 'dangerzone:main'
+dangerzone-image = "dangerzone.updater.cli:main"

 # Dependencies required for packaging the code on various platforms.
 [tool.poetry.group.package.dependencies]
7  tests/assets/signatures/README.md  Normal file
@@ -0,0 +1,7 @@
This folder contains signature-folders used for testing the signatures implementation.

The following folders are used:

- `valid`: this folder contains signatures which should be considered valid and generated with the key available at `tests/assets/test.pub.key`
- `invalid`: this folder contains signatures which should be considered invalid, because their format doesn't match the expected one, e.g. it uses plain text instead of base64-encoded text.
- `tempered`: this folder contains signatures which have been tampered with. The goal is to have signatures that look valid, but actually aren't.
@@ -0,0 +1,18 @@
[
  {
    "Base64Signature": "Invalid base64 signature",
    "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1NjoxOWU4ZWFjZDc1ODc5ZDA1ZjY2MjFjMmVhOGRkOTU1ZTY4ZWUzZTA3YjQxYjlkNTNmNGM4Y2M5OTI5YTY4YTY3In0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=",
    "Cert": null,
    "Chain": null,
    "Bundle": {
      "SignedEntryTimestamp": "MEUCIC9oXH9VVP96frVOmDw704FBqMN/Bpm2RMdTm6BtSwL/AiEA6mCIjhV65fYuy4CwjsIzQHi/oW6IBwtd6oCvN2dI6HQ=",
      "Payload": {
        "body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiJmMjEwNDJjY2RjOGU0ZjA1ZGEzNmE5ZjU4ODg5MmFlZGRlMzYzZTQ2ZWNjZGZjM2MyNzAyMTkwZDU0YTdmZmVlIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FWUNJUUNWaTJGUFI3Mjl1aHAvY3JFdUNTOW9yQzRhMnV0OHN3dDdTUnZXYUVSTGp3SWhBSlM1dzU3MHhsQnJsM2Nhd1Y1akQ1dk85RGh1dkNrdCtzOXJLdGc2NzVKQSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=",
        "integratedTime": 1738752154,
        "logIndex": 168898587,
        "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"
      }
    },
    "RFC3161Timestamp": null
  }
]
@@ -0,0 +1,18 @@
[
  {
    "Base64Signature": "MEQCICi2AOAJbS1k3334VMSo+qxaI4f5VoNnuVExZ4tfIu7rAiAiwuKdo8rGfFMGMLSFSQvoLF3JuwFy4JtNW6kQlwH7vg==",
    "Payload": "Invalid base64 payload",
    "Cert": null,
    "Chain": null,
    "Bundle": {
      "SignedEntryTimestamp": "MEUCIEvx6NtFeAag9TplqMLjVczT/tC6lpKe9SnrxbehBlxfAiEA07BE3f5JsMLsUsmHD58D6GaZr2yz+yQ66Os2ps8oKz8=",
      "Payload": {
        "body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiI4YmJmNGRiNjBmMmExM2IyNjI2NTI3MzljNWM5ZTYwNjNiMDYyNjVlODU1Zjc3MTdjMTdlYWY4YzViZTQyYWUyIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FUUNJQ2kyQU9BSmJTMWszMzM0Vk1TbytxeGFJNGY1Vm9ObnVWRXhaNHRmSXU3ckFpQWl3dUtkbzhyR2ZGTUdNTFNGU1F2b0xGM0p1d0Z5NEp0Tlc2a1Fsd0g3dmc9PSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=",
        "integratedTime": 1738859497,
        "logIndex": 169356501,
        "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"
      }
    },
    "RFC3161Timestamp": null
  }
]
@@ -0,0 +1,18 @@
[
  {
    "Base64Signature": "MEQCIDJxvB7lBU+VNYBD0xw/3Bi8wY7GPJ2fBP7mUFbguApoAiAIpuQT+sgatOY6yXkkA8K/sM40d5/gt7jQywWPbq5+iw==",
    "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hcHlyZ2lvL2RhbmdlcnpvbmUvZGFuZ2Vyem9uZSJ9LCJpbWFnZSI6eyJkb2NrZXItbWFuaWZlc3QtZGlnZXN0Ijoic2hhMjU2OjRkYTQ0MTIzNWU4NGU5MzUxODc3ODgyN2E1YzU3NDVkNTMyZDdhNDA3OTg4NmUxNjQ3OTI0YmVlN2VmMWMxNGQifSwidHlwZSI6ImNvc2lnbiBjb250YWluZXIgaW1hZ2Ugc2lnbmF0dXJlIn0sIm9wdGlvbmFsIjpudWxsfQ==",
    "Cert": null,
    "Chain": null,
    "Bundle": {
      "SignedEntryTimestamp": "Invalid signed entry timestamp",
      "Payload": {
        "body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiIyMGE2ZDU1NTk4Y2U0NjU3NWZkZjViZGU3YzhhYWE2YTU2ZjZlMGRmOWNiYTY1MTJhMDAxODhjMTU1NGIzYjE3In19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FUUNJREp4dkI3bEJVK1ZOWUJEMHh3LzNCaTh3WTdHUEoyZkJQN21VRmJndUFwb0FpQUlwdVFUK3NnYXRPWTZ5WGtrQThLL3NNNDBkNS9ndDdqUXl3V1BicTUraXc9PSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=",
        "integratedTime": 1738688492,
        "logIndex": 168652066,
        "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"
      }
    },
    "RFC3161Timestamp": null
  }
]
@@ -0,0 +1,18 @@
[
  {
    "Base64Signature": "MEUCIQC2WlJH+B8VuX1c6i4sDwEGEZc53hXUD6/ds9TMJ3HrfwIgCxSnrNYRD2c8XENqfqc+Ik1gx0DK9kPNsn/Lt8V/dCo=",
    "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1Njo3YjIxZGJkZWJmZmVkODU1NjIxZGZjZGVhYTUyMjMwZGM2NTY2OTk3Zjg1MmVmNWQ2MmIwMzM4YjQ2Nzk2ZTAxIn0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=",
    "Cert": null,
    "Chain": null,
    "Bundle": {
      "SignedEntryTimestamp": "MEYCIQDn04gOHqiZcwUO+NVV9+29+abu6O/k1ve9zatJ3gVu9QIhAJL3E+mqVPdMPfMSdhHt2XDQsYzfRDDJNJEABQlbV3Jg",
      "Payload": {
        "body": "Invalid bundle payload body",
        "integratedTime": 1738862352,
        "logIndex": 169369149,
        "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"
      }
    },
    "RFC3161Timestamp": null
  }
]
@@ -0,0 +1,18 @@
[
  {
    "Base64Signature": "MAIhAJWLYU9Hvb26Gn9ysS4JL2isLhra63yzC3tJG9ZoREuPAiEAlLnDnvTGUGuXdxrBXmMPm870OG68KS36z2sq2DrvkkAK",
    "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1NjoxOWU4ZWFjZDc1ODc5ZDA1ZjY2MjFjMmVhOGRkOTU1ZTY4ZWUzZTA3YjQxYjlkNTNmNGM4Y2M5OTI5YTY4YTY3In0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=",
    "Cert": null,
    "Chain": null,
    "Bundle": {
      "SignedEntryTimestamp": "MEUCIC9oXH9VVP96frVOmDw704FBqMN/Bpm2RMdTm6BtSwL/AiEA6mCIjhV65fYuy4CwjsIzQHi/oW6IBwtd6oCvN2dI6HQ=",
      "Payload": {
        "body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiJmMjEwNDJjY2RjOGU0ZjA1ZGEzNmE5ZjU4ODg5MmFlZGRlMzYzZTQ2ZWNjZGZjM2MyNzAyMTkwZDU0YTdmZmVlIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FWUNJUUNWaTJGUFI3Mjl1aHAvY3JFdUNTOW9yQzRhMnV0OHN3dDdTUnZXYUVSTGp3SWhBSlM1dzU3MHhsQnJsM2Nhd1Y1akQ1dk85RGh1dkNrdCtzOXJLdGc2NzVKQSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=",
        "integratedTime": 1738752154,
        "logIndex": 168898587,
        "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"
      }
    },
    "RFC3161Timestamp": null
  }
]
@@ -0,0 +1,18 @@
[
  {
    "Base64Signature": "MEQCICi2AOAJbS1k3334VMSo+qxaI4f5VoNnuVExZ4tfIu7rAiAiwuKdo8rGfFMGMLSFSQvoLF3JuwFy4JtNW6kQlwH7vg==",
    "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9oNHh4MHIvZGFuZ2Vyem9uZS9kYW5nZXJ6b25lIn0sImltYWdlIjp7ImRvY2tlci1tYW5pZmVzdC1kaWdlc3QiOiJzaGEyNTY6MjIwYjUyMjAwZTNlNDdiMWI0MjAxMDY2N2ZjYWE5MzM4NjgxZTY0ZGQzZTM0YTM0ODczODY2Y2IwNTFkNjk0ZSJ9LCJ0eXBlIjoiY29zaWduIGNvbnRhaW5lciBpbWFnZSBzaWduYXR1cmUifSwib3B0aW9uYWwiOm51bGx9Cg==",
    "Cert": null,
    "Chain": null,
    "Bundle": {
      "SignedEntryTimestamp": "MEUCIEvx6NtFeAag9TplqMLjVczT/tC6lpKe9SnrxbehBlxfAiEA07BE3f5JsMLsUsmHD58D6GaZr2yz+yQ66Os2ps8oKz8=",
      "Payload": {
        "body": "eyJhcGlWZXJzaW9uIjoiNi42LjYiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiI4YmJmNGRiNjBmMmExM2IyNjI2NTI3MzljNWM5ZTYwNjNiMDYyNjVlODU1Zjc3MTdjMTdlYWY4YzViZTQyYWUyIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FUUNJQ2kyQU9BSmJTMWszMzM0Vk1TbytxeGFJNGY1Vm9ObnVWRXhaNHRmSXU3ckFpQWl3dUtkbzhyR2ZGTUdNTFNGU1F2b0xGM0p1d0Z5NEp0Tlc2a1Fsd0g3dmc9PSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0K",
        "integratedTime": 1738859497,
        "logIndex": 169356501,
        "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"
      }
    },
    "RFC3161Timestamp": null
  }
]
@@ -0,0 +1 @@
This folder contains signatures which have been tampered with. The goal is to have signatures that look valid, but actually aren't.
@@ -0,0 +1 @@
[{"Base64Signature": "MEYCIQCVi2FPR729uhp/crEuCS9orC4a2ut8swt7SRvWaERLjwIhAJS5w570xlBrl3cawV5jD5vO9DhuvCkt+s9rKtg675JA", "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1NjoxOWU4ZWFjZDc1ODc5ZDA1ZjY2MjFjMmVhOGRkOTU1ZTY4ZWUzZTA3YjQxYjlkNTNmNGM4Y2M5OTI5YTY4YTY3In0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=", "Cert": null, "Chain": null, "Bundle": {"SignedEntryTimestamp": "MEUCIC9oXH9VVP96frVOmDw704FBqMN/Bpm2RMdTm6BtSwL/AiEA6mCIjhV65fYuy4CwjsIzQHi/oW6IBwtd6oCvN2dI6HQ=", "Payload": {"body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiJmMjEwNDJjY2RjOGU0ZjA1ZGEzNmE5ZjU4ODg5MmFlZGRlMzYzZTQ2ZWNjZGZjM2MyNzAyMTkwZDU0YTdmZmVlIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FWUNJUUNWaTJGUFI3Mjl1aHAvY3JFdUNTOW9yQzRhMnV0OHN3dDdTUnZXYUVSTGp3SWhBSlM1dzU3MHhsQnJsM2Nhd1Y1akQ1dk85RGh1dkNrdCtzOXJLdGc2NzVKQSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", "integratedTime": 1738752154, "logIndex": 168898587, "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"}}, "RFC3161Timestamp": null}]
@@ -0,0 +1 @@
[{"Base64Signature": "MEQCICi2AOAJbS1k3334VMSo+qxaI4f5VoNnuVExZ4tfIu7rAiAiwuKdo8rGfFMGMLSFSQvoLF3JuwFy4JtNW6kQlwH7vg==", "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1NjoyMjBiNTIyMDBlM2U0N2IxYjQyMDEwNjY3ZmNhYTkzMzg2ODFlNjRkZDNlMzRhMzQ4NzM4NjZjYjA1MWQ2OTRlIn0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=", "Cert": null, "Chain": null, "Bundle": {"SignedEntryTimestamp": "MEUCIEvx6NtFeAag9TplqMLjVczT/tC6lpKe9SnrxbehBlxfAiEA07BE3f5JsMLsUsmHD58D6GaZr2yz+yQ66Os2ps8oKz8=", "Payload": {"body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiI4YmJmNGRiNjBmMmExM2IyNjI2NTI3MzljNWM5ZTYwNjNiMDYyNjVlODU1Zjc3MTdjMTdlYWY4YzViZTQyYWUyIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FUUNJQ2kyQU9BSmJTMWszMzM0Vk1TbytxeGFJNGY1Vm9ObnVWRXhaNHRmSXU3ckFpQWl3dUtkbzhyR2ZGTUdNTFNGU1F2b0xGM0p1d0Z5NEp0Tlc2a1Fsd0g3dmc9PSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", "integratedTime": 1738859497, "logIndex": 169356501, "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"}}, "RFC3161Timestamp": null}]
@@ -0,0 +1 @@
[{"Base64Signature": "MEQCIDJxvB7lBU+VNYBD0xw/3Bi8wY7GPJ2fBP7mUFbguApoAiAIpuQT+sgatOY6yXkkA8K/sM40d5/gt7jQywWPbq5+iw==", "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hcHlyZ2lvL2RhbmdlcnpvbmUvZGFuZ2Vyem9uZSJ9LCJpbWFnZSI6eyJkb2NrZXItbWFuaWZlc3QtZGlnZXN0Ijoic2hhMjU2OjRkYTQ0MTIzNWU4NGU5MzUxODc3ODgyN2E1YzU3NDVkNTMyZDdhNDA3OTg4NmUxNjQ3OTI0YmVlN2VmMWMxNGQifSwidHlwZSI6ImNvc2lnbiBjb250YWluZXIgaW1hZ2Ugc2lnbmF0dXJlIn0sIm9wdGlvbmFsIjpudWxsfQ==", "Cert": null, "Chain": null, "Bundle": {"SignedEntryTimestamp": "MEYCIQDuuuHoyZ2i4HKxik4Ju/MWkELwc1w5SfzcpCV7G+vZHAIhAO25R/+lIfQ/kMfC4PfeoWDwLpvnH9cq6dVSzl12i1su", "Payload": {"body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiIyMGE2ZDU1NTk4Y2U0NjU3NWZkZjViZGU3YzhhYWE2YTU2ZjZlMGRmOWNiYTY1MTJhMDAxODhjMTU1NGIzYjE3In19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FUUNJREp4dkI3bEJVK1ZOWUJEMHh3LzNCaTh3WTdHUEoyZkJQN21VRmJndUFwb0FpQUlwdVFUK3NnYXRPWTZ5WGtrQThLL3NNNDBkNS9ndDdqUXl3V1BicTUraXc9PSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", "integratedTime": 1738688492, "logIndex": 168652066, "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"}}, "RFC3161Timestamp": null}]
@@ -0,0 +1 @@
[{"Base64Signature": "MEUCIQC2WlJH+B8VuX1c6i4sDwEGEZc53hXUD6/ds9TMJ3HrfwIgCxSnrNYRD2c8XENqfqc+Ik1gx0DK9kPNsn/Lt8V/dCo=", "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1Njo3YjIxZGJkZWJmZmVkODU1NjIxZGZjZGVhYTUyMjMwZGM2NTY2OTk3Zjg1MmVmNWQ2MmIwMzM4YjQ2Nzk2ZTAxIn0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=", "Cert": null, "Chain": null, "Bundle": {"SignedEntryTimestamp": "MEYCIQDn04gOHqiZcwUO+NVV9+29+abu6O/k1ve9zatJ3gVu9QIhAJL3E+mqVPdMPfMSdhHt2XDQsYzfRDDJNJEABQlbV3Jg", "Payload": {"body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiIzZWQwNWJlYTc2ZWFmMzBmYWM1NzBlNzhlODBlZmQxNDNiZWQxNzFjM2VjMDY5MWI2MDU3YjdhMDAzNGEyMzhlIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FVUNJUUMyV2xKSCtCOFZ1WDFjNmk0c0R3RUdFWmM1M2hYVUQ2L2RzOVRNSjNIcmZ3SWdDeFNuck5ZUkQyYzhYRU5xZnFjK0lrMWd4MERLOWtQTnNuL0x0OFYvZENvPSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", "integratedTime": 1738862352, "logIndex": 169369149, "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"}}, "RFC3161Timestamp": null}]
@@ -0,0 +1 @@
[{"Base64Signature": "MEQCIHqXEMuAmt1pFCsHC71+ejlG5kjKrf1+AQW202OY3vhsAiA0BoDAVgAk9K7SgIRBpIV6u0veyB1iypzV0DteNh3IoQ==", "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1NjpmYTk0ODcyNmFhYzI5YTZhYzQ5ZjAxZWM4ZmJiYWMxODUyMmIzNWIyNDkxZmRmNzE2MjM2YTBiMzUwMmEyY2E3In0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=", "Cert": null, "Chain": null, "Bundle": {"SignedEntryTimestamp": "MEUCIQCrZ+2SSYdpIOEbyUXXaBxeqT8RTujpqdXipls9hmNvDgIgdWV84PiCY2cI49QjHjun7lj25/znGMDiwjCuPjIPA6Q=", "Payload": {"body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiI5ZjcwM2I4NTM4MjM4N2U2OTgwNzYxNDg1YzU0NGIzNmJmMThmNTA5ODQwMTMxYzRmOTJhMjE4OTI3MTJmNDJmIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FUUNJSHFYRU11QW10MXBGQ3NIQzcxK2VqbEc1a2pLcmYxK0FRVzIwMk9ZM3Zoc0FpQTBCb0RBVmdBazlLN1NnSVJCcElWNnUwdmV5QjFpeXB6VjBEdGVOaDNJb1E9PSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", "integratedTime": 1737478056, "logIndex": 164177381, "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"}}, "RFC3161Timestamp": null}, {"Base64Signature": "MEYCIQDg8MeymBLOn+Khue0yK1yQy4Fu/+GXmyC/xezXO/p1JgIhAN6QLojKzkZGxyYirbqRbZCVcIM4YN3Y18FXwpW4RuUy", "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1NjpmYTk0ODcyNmFhYzI5YTZhYzQ5ZjAxZWM4ZmJiYWMxODUyMmIzNWIyNDkxZmRmNzE2MjM2YTBiMzUwMmEyY2E3In0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=", "Cert": null, "Chain": null, "Bundle": {"SignedEntryTimestamp": "MEUCIQCQLlrH2xo/bA6r386vOwA0OjUe0TqcxROT/Wo220jvGgIgPgRlKnQxWoXlD/Owf1Ogk5XlfXAt2f416LDbk4AoEvk=", "Payload": {"body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiI5ZjcwM2I4NTM4MjM4N2U2OTgwNzYxNDg1YzU0NGIzNmJmMThmNTA5ODQwMTMxYzRmOTJhMjE4OTI3MTJmNDJmIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FWUNJUURnOE1leW1CTE9uK0todWUweUsxeVF5NEZ1LytHWG15Qy94ZXpYTy9wMUpnSWhBTjZRTG9qS3prWkd4eVlpcmJxUmJaQ1ZjSU00WU4zWTE4Rlh3cFc0UnVVeSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", "integratedTime": 1737557525, "logIndex": 164445483, "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"}}, "RFC3161Timestamp": null}, {"Base64Signature": "MEQCIEhUVYVW6EdovGDSSZt1Ffc86OfzEKAas94M4eFK7hoFAiA4+6219LktmgJSKuc2ObsnL5QjHyNLk58BwY0s8gBHbQ==", "Payload": "eyJjcml0aWNhbCI6eyJpZGVudGl0eSI6eyJkb2NrZXItcmVmZXJlbmNlIjoiZ2hjci5pby9hbG1ldC9kYW5nZXJ6b25lL2RhbmdlcnpvbmUifSwiaW1hZ2UiOnsiZG9ja2VyLW1hbmlmZXN0LWRpZ2VzdCI6InNoYTI1NjpmYTk0ODcyNmFhYzI5YTZhYzQ5ZjAxZWM4ZmJiYWMxODUyMmIzNWIyNDkxZmRmNzE2MjM2YTBiMzUwMmEyY2E3In0sInR5cGUiOiJjb3NpZ24gY29udGFpbmVyIGltYWdlIHNpZ25hdHVyZSJ9LCJvcHRpb25hbCI6bnVsbH0=", "Cert": null, "Chain": null, "Bundle": {"SignedEntryTimestamp": "MEQCIDRUTMwL+/eW79ARRLE8h/ByCrvo0rOn3vUYQg1E6KIBAiBi/bzoqcL2Ik27KpwfFosww4l7yI+9IqwCvUlkQgEB7g==", "Payload": {"body": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiI5ZjcwM2I4NTM4MjM4N2U2OTgwNzYxNDg1YzU0NGIzNmJmMThmNTA5ODQwMTMxYzRmOTJhMjE4OTI3MTJmNDJmIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FUUNJRWhVVllWVzZFZG92R0RTU1p0MUZmYzg2T2Z6RUtBYXM5NE00ZUZLN2hvRkFpQTQrNjIxOUxrdG1nSlNLdWMyT2Jzbkw1UWpIeU5MazU4QndZMHM4Z0JIYlE9PSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCUVZVSk1TVU1nUzBWWkxTMHRMUzBLVFVacmQwVjNXVWhMYjFwSmVtb3dRMEZSV1VsTGIxcEplbW93UkVGUlkwUlJaMEZGYjBVd1ExaE1SMlptTnpsbVVqaExlVkJ1VTNaUFdUYzBWVUpyZEFveWMweHBLMkZXUmxWNlV6RlJkM1EwZDI5emVFaG9ZMFJPTWtJMlVWTnpUR3gyWjNOSU9ESnhObkZqUVRaUVRESlRaRk12Y0RScVYwZEJQVDBLTFMwdExTMUZUa1FnVUZWQ1RFbERJRXRGV1MwdExTMHRDZz09In19fX0=", "integratedTime": 1737567664, "logIndex": 164484602, "logID": "c0d23d6ad406973f9559f3ba2d1ca01f84147d8ffc5b8445c224f98b9591801d"}}, "RFC3161Timestamp": null}]
4 tests/assets/test.pub.key Normal file
@@ -0,0 +1,4 @@
-----BEGIN PUBLIC KEY-----
MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEoE0CXLGff79fR8KyPnSvOY74UBkt
2sLi+aVFUzS1Qwt4wosxHhcDN2B6QSsLlvgsH82q6qcA6PL2SdS/p4jWGA==
-----END PUBLIC KEY-----
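As a quick sanity check, this fixture should parse as the ECDSA P-256 key that cosign uses by default; a sketch assuming the `cryptography` package is available:

```python
from cryptography.hazmat.primitives.asymmetric import ec
from cryptography.hazmat.primitives.serialization import load_pem_public_key

# Parse the test key and confirm it is an EC key on the P-256 curve.
with open("tests/assets/test.pub.key", "rb") as f:
    key = load_pem_public_key(f.read())

assert isinstance(key, ec.EllipticCurvePublicKey)
assert key.curve.name == "secp256r1"  # a.k.a. NIST P-256
```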
@@ -9,10 +9,18 @@ import pytest

from dangerzone.document import SAFE_EXTENSION
from dangerzone.gui import Application
+from dangerzone.isolation_provider import container

sys.dangerzone_dev = True  # type: ignore[attr-defined]


+ASSETS_PATH = Path(__file__).parent / "assets"
+TEST_PUBKEY_PATH = ASSETS_PATH / "test.pub.key"
+INVALID_SIGNATURES_PATH = ASSETS_PATH / "signatures" / "invalid"
+VALID_SIGNATURES_PATH = ASSETS_PATH / "signatures" / "valid"
+TEMPERED_SIGNATURES_PATH = ASSETS_PATH / "signatures" / "tempered"
+
+
# Use this fixture to make `pytest-qt` invoke our custom QApplication.
# See https://pytest-qt.readthedocs.io/en/latest/qapplication.html#testing-custom-qapplications
@pytest.fixture(scope="session")
@@ -111,6 +119,14 @@ def sample_pdf() -> str:
    return str(test_docs_dir.joinpath(BASIC_SAMPLE_PDF))


+@pytest.fixture
+def skip_image_verification(monkeypatch):
+    def noop(*args, **kwargs):
+        return True
+
+    monkeypatch.setattr(container, "verify_local_image", noop)
+
+
SAMPLE_DIRECTORY = "test_docs"
BASIC_SAMPLE_PDF = "sample-pdf.pdf"
BASIC_SAMPLE_DOC = "sample-doc.doc"
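Tests that construct a `Container` provider can then opt out of signature verification by requesting this fixture; a minimal sketch (the test name is hypothetical):

```python
from dangerzone.isolation_provider.container import Container


def test_container_without_verification(skip_image_verification) -> None:
    # The fixture has already replaced container.verify_local_image with a
    # noop via monkeypatch, so nothing here attempts real verification.
    provider = Container()
    assert provider is not None
```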
@@ -24,9 +24,10 @@ from dangerzone.gui.main_window import (
    QtGui,
    WaitingWidgetContainer,
)
-from dangerzone.gui.updater import UpdateReport, UpdaterThread
+from dangerzone.gui.updater import UpdaterThread
from dangerzone.isolation_provider.container import Container
from dangerzone.isolation_provider.dummy import Dummy
+from dangerzone.updater import releases

from .test_updater import assert_report_equal, default_updater_settings

@@ -96,7 +97,7 @@ def test_default_menu(
    updater: UpdaterThread,
) -> None:
    """Check that the default menu entries are in order."""
-    updater.dangerzone.settings.set("updater_check", True)
+    updater.dangerzone.settings.set("updater_check_all", True)

    window = MainWindow(updater.dangerzone)
    menu_actions = window.hamburger_button.menu().actions()
@@ -114,7 +115,7 @@ def test_default_menu(

    toggle_updates_action.trigger()
    assert not toggle_updates_action.isChecked()
-    assert updater.dangerzone.settings.get("updater_check") is False
+    assert updater.dangerzone.settings.get("updater_check_all") is False


def test_no_update(
@@ -127,12 +128,12 @@ def test_no_update(
    # Check that when no update is detected, e.g., due to update cooldown, an empty
    # report is received that does not affect the menu entries.
    curtime = int(time.time())
-    updater.dangerzone.settings.set("updater_check", True)
+    updater.dangerzone.settings.set("updater_check_all", True)
    updater.dangerzone.settings.set("updater_errors", 9)
    updater.dangerzone.settings.set("updater_last_check", curtime)

    expected_settings = default_updater_settings()
-    expected_settings["updater_check"] = True
+    expected_settings["updater_check_all"] = True
    expected_settings["updater_errors"] = 0  # errors must be cleared
    expected_settings["updater_last_check"] = curtime
@@ -147,7 +148,7 @@ def test_no_update(

    # Check that the callback function gets an empty report.
    handle_updates_spy.assert_called_once()
-    assert_report_equal(handle_updates_spy.call_args.args[0], UpdateReport())
+    assert_report_equal(handle_updates_spy.call_args.args[0], releases.UpdateReport())

    # Check that the menu entries remain exactly the same.
    menu_actions_after = window.hamburger_button.menu().actions()
@@ -165,14 +166,14 @@ def test_update_detected(
) -> None:
    """Test that a newly detected version leads to a notification to the user."""

-    qt_updater.dangerzone.settings.set("updater_check", True)
+    qt_updater.dangerzone.settings.set("updater_check_all", True)
    qt_updater.dangerzone.settings.set("updater_last_check", 0)
    qt_updater.dangerzone.settings.set("updater_errors", 9)

    # Make requests.get().json() return the following dictionary.
    mock_upstream_info = {"tag_name": "99.9.9", "body": "changelog"}
-    mocker.patch("dangerzone.gui.updater.requests.get")
-    requests_mock = updater_module.requests.get
+    mocker.patch("dangerzone.updater.releases.requests.get")
+    requests_mock = releases.requests.get
    requests_mock().status_code = 200  # type: ignore [call-arg]
    requests_mock().json.return_value = mock_upstream_info  # type: ignore [attr-defined, call-arg]

@@ -191,12 +192,13 @@ def test_update_detected(
    # Check that the callback function gets an update report.
    handle_updates_spy.assert_called_once()
    assert_report_equal(
-        handle_updates_spy.call_args.args[0], UpdateReport("99.9.9", "<p>changelog</p>")
+        handle_updates_spy.call_args.args[0],
+        releases.UpdateReport("99.9.9", "<p>changelog</p>"),
    )

    # Check that the settings have been updated properly.
    expected_settings = default_updater_settings()
-    expected_settings["updater_check"] = True
+    expected_settings["updater_check_all"] = True
    expected_settings["updater_last_check"] = qt_updater.dangerzone.settings.get(
        "updater_last_check"
    )
@@ -277,13 +279,13 @@ def test_update_error(
) -> None:
    """Test that an error during an update check leads to a notification to the user."""
    # Test 1 - Check that the first error does not notify the user.
-    qt_updater.dangerzone.settings.set("updater_check", True)
+    qt_updater.dangerzone.settings.set("updater_check_all", True)
    qt_updater.dangerzone.settings.set("updater_last_check", 0)
    qt_updater.dangerzone.settings.set("updater_errors", 0)

-    # Make requests.get() return an errorthe following dictionary.
-    mocker.patch("dangerzone.gui.updater.requests.get")
-    requests_mock = updater_module.requests.get
+    # Make requests.get() return an error
+    mocker.patch("dangerzone.updater.releases.requests.get")
+    requests_mock = releases.requests.get
    requests_mock.side_effect = Exception("failed")  # type: ignore [attr-defined]

    window = MainWindow(qt_updater.dangerzone)
@@ -304,7 +306,7 @@ def test_update_error(

    # Check that the settings have been updated properly.
    expected_settings = default_updater_settings()
-    expected_settings["updater_check"] = True
+    expected_settings["updater_check_all"] = True
    expected_settings["updater_last_check"] = qt_updater.dangerzone.settings.get(
        "updater_last_check"
    )
@@ -12,7 +12,9 @@ from pytestqt.qtbot import QtBot

from dangerzone import settings
from dangerzone.gui import updater as updater_module
-from dangerzone.gui.updater import UpdateReport, UpdaterThread
+from dangerzone.gui.updater import UpdaterThread
+from dangerzone.updater import releases
+from dangerzone.updater.releases import UpdateReport
from dangerzone.util import get_version

from ..test_settings import default_settings_0_4_1, save_settings
@@ -104,7 +106,7 @@ def test_post_0_4_2_settings(
def test_linux_no_check(updater: UpdaterThread, monkeypatch: MonkeyPatch) -> None:
    """Ensure that Dangerzone on Linux does not make any update check."""
    expected_settings = default_updater_settings()
-    expected_settings["updater_check"] = False
+    expected_settings["updater_check_all"] = False
    expected_settings["updater_last_check"] = None

    # XXX: Simulate Dangerzone installed via package manager.
@@ -116,15 +118,16 @@ def test_linux_no_check(updater: UpdaterThread, monkeypatch: MonkeyPatch) -> Non

def test_user_prompts(updater: UpdaterThread, mocker: MockerFixture) -> None:
    """Test prompting users to ask them if they want to enable update checks."""
+    settings = updater.dangerzone.settings
    # First run
    #
    # When Dangerzone runs for the first time, users should not be asked to enable
    # updates.
    expected_settings = default_updater_settings()
-    expected_settings["updater_check"] = None
+    expected_settings["updater_check_all"] = None
    expected_settings["updater_last_check"] = 0
    assert updater.should_check_for_updates() is False
-    assert updater.dangerzone.settings.get_updater_settings() == expected_settings
+    assert settings.get_updater_settings() == expected_settings

    # Second run
    #
@@ -136,16 +139,16 @@ def test_user_prompts(updater: UpdaterThread, mocker: MockerFixture) -> None:

    # Check disabling update checks.
    prompt_mock().launch.return_value = False  # type: ignore [attr-defined]
-    expected_settings["updater_check"] = False
+    expected_settings["updater_check_all"] = False
    assert updater.should_check_for_updates() is False
-    assert updater.dangerzone.settings.get_updater_settings() == expected_settings
+    assert settings.get_updater_settings() == expected_settings

-    # Reset the "updater_check" field and check enabling update checks.
-    updater.dangerzone.settings.set("updater_check", None)
+    # Reset the "updater_check_all" field and check enabling update checks.
+    settings.set("updater_check_all", None)
    prompt_mock().launch.return_value = True  # type: ignore [attr-defined]
-    expected_settings["updater_check"] = True
+    expected_settings["updater_check_all"] = True
    assert updater.should_check_for_updates() is True
-    assert updater.dangerzone.settings.get_updater_settings() == expected_settings
+    assert settings.get_updater_settings() == expected_settings

    # Third run
    #
@@ -153,7 +156,7 @@ def test_user_prompts(updater: UpdaterThread, mocker: MockerFixture) -> None:
    # checks.
    prompt_mock().side_effect = RuntimeError("Should not be called")  # type: ignore [attr-defined]
    for check in [True, False]:
-        updater.dangerzone.settings.set("updater_check", check)
+        settings.set("updater_check_all", check)
        assert updater.should_check_for_updates() == check


@@ -161,43 +164,44 @@ def test_update_checks(
    updater: UpdaterThread, monkeypatch: MonkeyPatch, mocker: MockerFixture
) -> None:
    """Test version update checks."""
+    settings = updater.dangerzone.settings
    # This dictionary will simulate GitHub's response.
    mock_upstream_info = {"tag_name": f"v{get_version()}", "body": "changelog"}

    # Make requests.get().json() return the above dictionary.
-    mocker.patch("dangerzone.gui.updater.requests.get")
-    requests_mock = updater_module.requests.get
+    mocker.patch("dangerzone.updater.releases.requests.get")
+    requests_mock = updater_module.releases.requests.get
    requests_mock().status_code = 200  # type: ignore [call-arg]
    requests_mock().json.return_value = mock_upstream_info  # type: ignore [attr-defined, call-arg]

    # Always assume that we can perform multiple update checks in a row.
-    monkeypatch.setattr(updater, "_should_postpone_update_check", lambda: False)
+    mocker.patch(
+        "dangerzone.updater.releases._should_postpone_update_check", return_value=False
+    )

    # Test 1 - Check that the current version triggers no updates.
-    report = updater.check_for_updates()
+    report = releases.check_for_updates(settings)
    assert_report_equal(report, UpdateReport())

    # Test 2 - Check that a newer version triggers updates, and that the changelog is
    # rendered from Markdown to HTML.
    mock_upstream_info["tag_name"] = "v99.9.9"
-    report = updater.check_for_updates()
+    report = releases.check_for_updates(settings)
    assert_report_equal(
        report, UpdateReport(version="99.9.9", changelog="<p>changelog</p>")
    )

    # Test 3 - Check that HTTP errors are converted to error reports.
    requests_mock.side_effect = Exception("failed")  # type: ignore [attr-defined]
-    report = updater.check_for_updates()
-    error_msg = (
-        f"Encountered an exception while checking {updater.GH_RELEASE_URL}: failed"
-    )
+    report = releases.check_for_updates(settings)
+    error_msg = f"Encountered an exception while checking {updater_module.releases.GH_RELEASE_URL}: failed"
    assert_report_equal(report, UpdateReport(error=error_msg))

    # Test 4 - Check that cached version/changelog info do not trigger an update check.
-    updater.dangerzone.settings.set("updater_latest_version", "99.9.9")
-    updater.dangerzone.settings.set("updater_latest_changelog", "<p>changelog</p>")
+    settings.set("updater_latest_version", "99.9.9")
+    settings.set("updater_latest_changelog", "<p>changelog</p>")

-    report = updater.check_for_updates()
+    report = releases.check_for_updates(settings)
    assert_report_equal(
        report, UpdateReport(version="99.9.9", changelog="<p>changelog</p>")
    )
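The pattern these tests rely on is to patch `requests.get` at the path where the `releases` module looks it up, then script its behavior per call; a condensed, illustrative sketch (the test name is hypothetical):

```python
from pytest_mock import MockerFixture


def test_mock_github_response(mocker: MockerFixture) -> None:
    # Patch the name where it is *used* (dangerzone.updater.releases), not
    # where it is defined (the requests package itself).
    mock_get = mocker.patch("dangerzone.updater.releases.requests.get")
    mock_get.return_value.status_code = 200
    mock_get.return_value.json.return_value = {
        "tag_name": "v99.9.9",
        "body": "changelog",
    }
```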
@@ -205,14 +209,16 @@ def test_update_checks(

def test_update_checks_cooldown(updater: UpdaterThread, mocker: MockerFixture) -> None:
    """Make sure Dangerzone only checks for updates every X hours"""
-    updater.dangerzone.settings.set("updater_check", True)
-    updater.dangerzone.settings.set("updater_last_check", 0)
+    settings = updater.dangerzone.settings
+
+    settings.set("updater_check_all", True)
+    settings.set("updater_last_check", 0)

    # Mock some functions before the tests start
-    cooldown_spy = mocker.spy(updater, "_should_postpone_update_check")
-    timestamp_mock = mocker.patch.object(updater, "_get_now_timestamp")
-    mocker.patch("dangerzone.gui.updater.requests.get")
-    requests_mock = updater_module.requests.get
+    cooldown_spy = mocker.spy(updater_module.releases, "_should_postpone_update_check")
+    timestamp_mock = mocker.patch.object(updater_module.releases, "_get_now_timestamp")
+    mocker.patch("dangerzone.updater.releases.requests.get")
+    requests_mock = updater_module.releases.requests.get

    # # Make requests.get().json() return the version info that we want.
    mock_upstream_info = {"tag_name": "99.9.9", "body": "changelog"}
@@ -225,9 +231,9 @@ def test_update_checks_cooldown(updater: UpdaterThread, mocker: MockerFixture) -
    curtime = int(time.time())
    timestamp_mock.return_value = curtime

-    report = updater.check_for_updates()
+    report = releases.check_for_updates(settings)
    assert cooldown_spy.spy_return is False
-    assert updater.dangerzone.settings.get("updater_last_check") == curtime
+    assert settings.get("updater_last_check") == curtime
    assert_report_equal(report, UpdateReport("99.9.9", "<p>changelog</p>"))

    # Test 2: Advance the current time by 1 second, and ensure that no update will take
@@ -236,41 +242,39 @@ def test_update_checks_cooldown(updater: UpdaterThread, mocker: MockerFixture) -
    curtime += 1
    timestamp_mock.return_value = curtime
    requests_mock.side_effect = Exception("failed")  # type: ignore [attr-defined]
-    updater.dangerzone.settings.set("updater_latest_version", get_version())
-    updater.dangerzone.settings.set("updater_latest_changelog", None)
+    settings.set("updater_latest_version", get_version())
+    settings.set("updater_latest_changelog", None)

-    report = updater.check_for_updates()
+    report = releases.check_for_updates(settings)
    assert cooldown_spy.spy_return is True
-    assert updater.dangerzone.settings.get("updater_last_check") == curtime - 1  # type: ignore [unreachable]
+    assert settings.get("updater_last_check") == curtime - 1  # type: ignore [unreachable]
    assert_report_equal(report, UpdateReport())

    # Test 3: Advance the current time by <cooldown period> seconds. Ensure that
    # Dangerzone checks for updates again, and the last check timestamp gets bumped.
-    curtime += updater_module.UPDATE_CHECK_COOLDOWN_SECS
+    curtime += updater_module.releases.UPDATE_CHECK_COOLDOWN_SECS
    timestamp_mock.return_value = curtime
    requests_mock.side_effect = None

-    report = updater.check_for_updates()
+    report = releases.check_for_updates(settings)
    assert cooldown_spy.spy_return is False
-    assert updater.dangerzone.settings.get("updater_last_check") == curtime
+    assert settings.get("updater_last_check") == curtime
    assert_report_equal(report, UpdateReport("99.9.9", "<p>changelog</p>"))

    # Test 4: Make Dangerzone check for updates again, but this time, it should
    # encounter an error while doing so. In that case, the last check timestamp
    # should be bumped, so that subsequent checks don't take place.
-    updater.dangerzone.settings.set("updater_latest_version", get_version())
-    updater.dangerzone.settings.set("updater_latest_changelog", None)
+    settings.set("updater_latest_version", get_version())
+    settings.set("updater_latest_changelog", None)

-    curtime += updater_module.UPDATE_CHECK_COOLDOWN_SECS
+    curtime += updater_module.releases.UPDATE_CHECK_COOLDOWN_SECS
    timestamp_mock.return_value = curtime
    requests_mock.side_effect = Exception("failed")

-    report = updater.check_for_updates()
+    report = releases.check_for_updates(settings)
    assert cooldown_spy.spy_return is False
-    assert updater.dangerzone.settings.get("updater_last_check") == curtime
-    error_msg = (
-        f"Encountered an exception while checking {updater.GH_RELEASE_URL}: failed"
-    )
+    assert settings.get("updater_last_check") == curtime
+    error_msg = f"Encountered an exception while checking {updater_module.releases.GH_RELEASE_URL}: failed"
    assert_report_equal(report, UpdateReport(error=error_msg))
|
||||||
updater: UpdaterThread, monkeypatch: MonkeyPatch, mocker: MockerFixture
|
updater: UpdaterThread, monkeypatch: MonkeyPatch, mocker: MockerFixture
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Test update check errors."""
|
"""Test update check errors."""
|
||||||
# Mock requests.get().
|
settings = updater.dangerzone.settings
|
||||||
mocker.patch("dangerzone.gui.updater.requests.get")
|
|
||||||
requests_mock = updater_module.requests.get
|
|
||||||
|
|
||||||
# Always assume that we can perform multiple update checks in a row.
|
# Always assume that we can perform multiple update checks in a row.
|
||||||
monkeypatch.setattr(updater, "_should_postpone_update_check", lambda: False)
|
monkeypatch.setattr(releases, "_should_postpone_update_check", lambda _: False)
|
||||||
|
|
||||||
|
# Mock requests.get().
|
||||||
|
mocker.patch("dangerzone.updater.releases.requests.get")
|
||||||
|
requests_mock = releases.requests.get
|
||||||
|
|
||||||
# Test 1 - Check that request exceptions are being detected as errors.
|
# Test 1 - Check that request exceptions are being detected as errors.
|
||||||
requests_mock.side_effect = Exception("bad url") # type: ignore [attr-defined]
|
requests_mock.side_effect = Exception("bad url") # type: ignore [attr-defined]
|
||||||
report = updater.check_for_updates()
|
report = releases.check_for_updates(settings)
|
||||||
assert report.error is not None
|
assert report.error is not None
|
||||||
assert "bad url" in report.error
|
assert "bad url" in report.error
|
||||||
assert "Encountered an exception" in report.error
|
assert "Encountered an exception" in report.error
|
||||||
|
@ -298,7 +303,7 @@ def test_update_errors(
|
||||||
|
|
||||||
requests_mock.return_value = MockResponse500() # type: ignore [attr-defined]
|
requests_mock.return_value = MockResponse500() # type: ignore [attr-defined]
|
||||||
requests_mock.side_effect = None # type: ignore [attr-defined]
|
requests_mock.side_effect = None # type: ignore [attr-defined]
|
||||||
report = updater.check_for_updates()
|
report = releases.check_for_updates(settings)
|
||||||
assert report.error is not None
|
assert report.error is not None
|
||||||
assert "Encountered an HTTP 500 error" in report.error
|
assert "Encountered an HTTP 500 error" in report.error
|
||||||
|
|
||||||
|
@ -310,7 +315,7 @@ def test_update_errors(
|
||||||
return json.loads("bad json")
|
return json.loads("bad json")
|
||||||
|
|
||||||
requests_mock.return_value = MockResponseBadJSON() # type: ignore [attr-defined]
|
requests_mock.return_value = MockResponseBadJSON() # type: ignore [attr-defined]
|
||||||
report = updater.check_for_updates()
|
report = releases.check_for_updates(settings)
|
||||||
assert report.error is not None
|
assert report.error is not None
|
||||||
assert "Received a non-JSON response" in report.error
|
assert "Received a non-JSON response" in report.error
|
||||||
|
|
||||||
|
@ -322,7 +327,7 @@ def test_update_errors(
|
||||||
return {}
|
return {}
|
||||||
|
|
||||||
requests_mock.return_value = MockResponseEmpty() # type: ignore [attr-defined]
|
requests_mock.return_value = MockResponseEmpty() # type: ignore [attr-defined]
|
||||||
report = updater.check_for_updates()
|
report = releases.check_for_updates(settings)
|
||||||
assert report.error is not None
|
assert report.error is not None
|
||||||
assert "Missing required fields in JSON" in report.error
|
assert "Missing required fields in JSON" in report.error
|
||||||
|
|
||||||
|
@ -334,7 +339,7 @@ def test_update_errors(
|
||||||
return {"tag_name": "vbad_version", "body": "changelog"}
|
return {"tag_name": "vbad_version", "body": "changelog"}
|
||||||
|
|
||||||
requests_mock.return_value = MockResponseBadVersion() # type: ignore [attr-defined]
|
requests_mock.return_value = MockResponseBadVersion() # type: ignore [attr-defined]
|
||||||
report = updater.check_for_updates()
|
report = releases.check_for_updates(settings)
|
||||||
assert report.error is not None
|
assert report.error is not None
|
||||||
assert "Invalid version" in report.error
|
assert "Invalid version" in report.error
|
||||||
|
|
||||||
|
@ -346,7 +351,7 @@ def test_update_errors(
|
||||||
return {"tag_name": "v99.9.9", "body": ["bad", "markdown"]}
|
return {"tag_name": "v99.9.9", "body": ["bad", "markdown"]}
|
||||||
|
|
||||||
requests_mock.return_value = MockResponseBadMarkdown() # type: ignore [attr-defined]
|
requests_mock.return_value = MockResponseBadMarkdown() # type: ignore [attr-defined]
|
||||||
report = updater.check_for_updates()
|
report = releases.check_for_updates(settings)
|
||||||
assert report.error is not None
|
assert report.error is not None
|
||||||
|
|
||||||
# Test 7 - Check that a valid response passes.
|
# Test 7 - Check that a valid response passes.
|
||||||
|
@ -357,7 +362,7 @@ def test_update_errors(
|
||||||
return {"tag_name": "v99.9.9", "body": "changelog"}
|
return {"tag_name": "v99.9.9", "body": "changelog"}
|
||||||
|
|
||||||
requests_mock.return_value = MockResponseValid() # type: ignore [attr-defined]
|
requests_mock.return_value = MockResponseValid() # type: ignore [attr-defined]
|
||||||
report = updater.check_for_updates()
|
report = releases.check_for_updates(settings)
|
||||||
assert_report_equal(report, UpdateReport("99.9.9", "<p>changelog</p>"))
|
assert_report_equal(report, UpdateReport("99.9.9", "<p>changelog</p>"))
|
||||||
|
|
||||||
|
|
||||||
|
@ -367,24 +372,28 @@ def test_update_check_prompt(
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Test that the prompt to enable update checks works properly."""
|
"""Test that the prompt to enable update checks works properly."""
|
||||||
# Force Dangerzone to check immediately for updates
|
# Force Dangerzone to check immediately for updates
|
||||||
qt_updater.dangerzone.settings.set("updater_last_check", 0)
|
settings = qt_updater.dangerzone.settings
|
||||||
|
settings.set("updater_last_check", 0)
|
||||||
|
|
||||||
# Test 1 - Check that on the second run of Dangerzone, the user is prompted to
|
     # Test 1 - Check that on the second run of Dangerzone, the user is prompted to
     # choose if they want to enable update checks.
     def check_button_labels() -> None:
         dialog = qt_updater.dangerzone.app.activeWindow()
-        assert dialog.ok_button.text() == "Check Automatically"  # type: ignore [attr-defined]
-        assert dialog.cancel_button.text() == "Don't Check"  # type: ignore [attr-defined]
+        assert dialog.ok_button.text() == "Enable sandbox updates"  # type: ignore [attr-defined]
+        assert dialog.cancel_button.text() == "Do not make any requests"  # type: ignore [attr-defined]
         dialog.ok_button.click()  # type: ignore [attr-defined]

     QtCore.QTimer.singleShot(500, check_button_labels)
+    mocker.patch(
+        "dangerzone.updater.releases._should_postpone_update_check", return_value=False
+    )
     res = qt_updater.should_check_for_updates()

     assert res is True

     # Test 2 - Check that when the user chooses to enable update checks, we
     # store that decision in the settings.
-    qt_updater.check = None
+    settings.set("updater_check_all", None, autosave=True)

     def click_ok() -> None:
         dialog = qt_updater.dangerzone.app.activeWindow()
@@ -394,11 +403,11 @@ def test_update_check_prompt(
     res = qt_updater.should_check_for_updates()

     assert res is True
-    assert qt_updater.check is True
+    assert settings.get("updater_check_all") is True

     # Test 3 - Same as the previous test, but check that clicking on cancel stores the
     # opposite decision.
-    qt_updater.check = None  # type: ignore [unreachable]
+    settings.set("updater_check_all", None)  # type: ignore [unreachable]

     def click_cancel() -> None:
         dialog = qt_updater.dangerzone.app.activeWindow()
@@ -408,11 +417,11 @@ def test_update_check_prompt(
     res = qt_updater.should_check_for_updates()

     assert res is False
-    assert qt_updater.check is False
+    assert settings.get("updater_check_all") is False

     # Test 4 - Same as the previous test, but check that clicking on "X" does not store
     # any decision.
-    qt_updater.check = None
+    settings.set("updater_check_all", None, autosave=True)

     def click_x() -> None:
         dialog = qt_updater.dangerzone.app.activeWindow()
@@ -422,4 +431,4 @@ def test_update_check_prompt(
     res = qt_updater.should_check_for_updates()

     assert res is False
-    assert qt_updater.check is None
+    assert settings.get("updater_check_all") is None
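The tests above pin down the new first-run behavior: the user's choice is persisted in the "updater_check_all" setting rather than in a transient qt_updater.check attribute. A minimal self-contained sketch of the decision logic they imply (the Settings stub and the dialog wiring are assumptions for illustration, not dangerzone's actual code):

    # Sketch of the prompt flow pinned down by the four tests. Only the
    # "updater_check_all" setting and the three outcomes (enable, disable,
    # dismiss) come from the tests; everything else is assumed.
    from typing import Optional

    class Settings:
        """Stand-in for dangerzone's persistent settings store."""

        def __init__(self) -> None:
            self._data = {}

        def get(self, key):
            return self._data.get(key)

        def set(self, key, value, autosave: bool = False) -> None:
            self._data[key] = value

    def should_check_for_updates(settings: Settings, answer: Optional[str]) -> bool:
        # answer: "ok" (enable checks), "cancel" (disable), None (dismissed with "X")
        if settings.get("updater_check_all") is None:
            if answer == "ok":
                settings.set("updater_check_all", True, autosave=True)
            elif answer == "cancel":
                settings.set("updater_check_all", False, autosave=True)
            else:
                return False  # Dismissed: store nothing and skip this run.
        return settings.get("updater_check_all") is True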
@@ -6,9 +6,10 @@ from pytest_mock import MockerFixture
 from pytest_subprocess import FakeProcess

 from dangerzone import errors
-from dangerzone.container_utils import Runtime
+from dangerzone.container_utils import CONTAINER_NAME, Runtime
 from dangerzone.isolation_provider.container import Container
 from dangerzone.isolation_provider.qubes import is_qubes_native_conversion
+from dangerzone.updater import SignatureError, UpdaterError
 from dangerzone.util import get_resource_path

 from .base import IsolationProviderTermination, IsolationProviderTest
@@ -21,7 +22,7 @@ elif os.environ.get("DUMMY_CONVERSION", False):


 @pytest.fixture
-def provider() -> Container:
+def provider(skip_image_verification: None) -> Container:
     return Container()


@@ -57,8 +58,13 @@ class TestContainer(IsolationProviderTest):
         )
         provider.is_available()

-    def test_install_raise_if_image_cant_be_installed(
-        self, provider: Container, fp: FakeProcess, runtime_path: str
+    def test_install_raise_if_local_image_cant_be_installed(
+        self,
+        provider: Container,
+        fp: FakeProcess,
+        runtime_path: str,
+        skip_image_verification,
+        mocker: MockerFixture,
     ) -> None:
         """When an image installation fails, an exception should be raised"""

@@ -74,60 +80,85 @@ class TestContainer(IsolationProviderTest):
                 "list",
                 "--format",
                 "{{ .Tag }}",
-                "dangerzone.rocks/dangerzone",
+                CONTAINER_NAME,
             ],
             occurrences=2,
         )
-        fp.register_subprocess(
-            [
-                runtime_path,
-                "load",
-                "-i",
-                get_resource_path("container.tar").absolute(),
-            ],
-            returncode=-1,
+        mocker.patch(
+            "dangerzone.isolation_provider.container.install_local_container_tar",
+            side_effect=UpdaterError,
         )

-        with pytest.raises(errors.ImageInstallationException):
-            provider.install()
+        with pytest.raises(UpdaterError):
+            provider.install(should_upgrade=False)

-    def test_install_raises_if_still_not_installed(
-        self, provider: Container, fp: FakeProcess, runtime_path: str
+    def test_install_raise_if_local_image_cant_be_verified(
+        self,
+        provider: Container,
+        runtime_path: str,
+        skip_image_verification,
+        mocker: MockerFixture,
     ) -> None:
-        """When an image keep being not installed, it should return False"""
-        fp.register_subprocess(
-            [runtime_path, "version", "-f", "{{.Client.Version}}"],
-            stdout="4.0.0",
-        )
-
-        fp.register_subprocess(
-            [runtime_path, "image", "ls"],
-        )
-
-        # First check should return nothing.
-        fp.register_subprocess(
-            [
-                runtime_path,
-                "image",
-                "list",
-                "--format",
-                "{{ .Tag }}",
-                "dangerzone.rocks/dangerzone",
-            ],
-            occurrences=2,
-        )
-
-        fp.register_subprocess(
-            [
-                runtime_path,
-                "load",
-                "-i",
-                get_resource_path("container.tar").absolute(),
-            ],
-        )
-        with pytest.raises(errors.ImageNotPresentException):
-            provider.install()
+        """If a local image is installed but its signature cannot be verified, an exception should be raised"""
+        mocker.patch(
+            "dangerzone.isolation_provider.container.container_utils.list_image_tags",
+            return_value=["a-tag"],
+        )
+        mocker.patch(
+            "dangerzone.isolation_provider.container.verify_local_image",
+            side_effect=SignatureError,
+        )
+
+        with pytest.raises(SignatureError):
+            provider.install(should_upgrade=False)
+
+    def test_install_raise_if_local_image_install_works_on_second_try(
+        self,
+        provider: Container,
+        runtime_path: str,
+        skip_image_verification,
+        mocker: MockerFixture,
+    ) -> None:
+        """If verification fails once but succeeds on a second try, install() should succeed"""
+        mocker.patch(
+            "dangerzone.isolation_provider.container.container_utils.list_image_tags",
+            return_value=["a-tag"],
+        )
+        mocker.patch(
+            "dangerzone.isolation_provider.container.verify_local_image",
+            side_effect=[SignatureError, True],
+        )
+
+        provider.install(should_upgrade=False)
+
+    def test_install_upgrades_if_available(
+        self,
+        provider: Container,
+        runtime_path: str,
+        skip_image_verification,
+        mocker: MockerFixture,
+    ) -> None:
+        """When a newer remote image is available, install(should_upgrade=True) should fetch it"""
+        mocker.patch(
+            "dangerzone.isolation_provider.container.container_utils.list_image_tags",
+            return_value=["a-tag"],
+        )
+        mocker.patch(
+            "dangerzone.isolation_provider.container.is_update_available",
+            return_value=(True, "digest"),
+        )
+        upgrade = mocker.patch(
+            "dangerzone.isolation_provider.container.upgrade_container_image",
+        )
+        mocker.patch(
+            "dangerzone.isolation_provider.container.verify_local_image",
+        )
+
+        provider.install(should_upgrade=True)
+        upgrade.assert_called()

     @pytest.mark.skipif(
         platform.system() not in ("Windows", "Darwin"),
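Read together, the four tests describe an install flow that checks for a locally installed image, falls back to the bundled tarball, verifies the image signature with one retry, and only consults the registry when should_upgrade=True. A self-contained sketch under those assumptions (the collaborator names mirror the patch targets in the tests; the real Container.install() may differ):

    # Sketch of the install flow the tests above exercise; collaborators are
    # injected so the snippet runs stand-alone.
    from typing import Callable, List, Optional, Tuple

    class SignatureError(Exception): ...
    class UpdaterError(Exception): ...

    def install_container_image(
        list_image_tags: Callable[[], List[str]],
        install_local_container_tar: Callable[[], None],  # may raise UpdaterError
        verify_local_image: Callable[[], bool],  # may raise SignatureError
        is_update_available: Callable[[], Tuple[bool, Optional[str]]],
        upgrade_container_image: Callable[[str], None],
        should_upgrade: bool = False,
    ) -> bool:
        # Upgrade path: only taken when the caller opts in.
        if should_upgrade:
            available, digest = is_update_available()
            if available and digest:
                upgrade_container_image(digest)
        # Fall back to the image tarball bundled with the application.
        if not list_image_tags():
            install_local_container_tar()
        # Always verify the local image.
        try:
            verify_local_image()
        except SignatureError:
            # Assumption: one more verification attempt (e.g. after
            # re-installing the bundled image) before giving up.
            verify_local_image()
        return True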
@@ -202,7 +202,12 @@ class TestCliConversion(TestCliBasic):
         result.assert_success()

     @for_each_doc
-    def test_formats(self, doc: Path, tmp_path_factory: pytest.TempPathFactory) -> None:
+    def test_formats(
+        self,
+        doc: Path,
+        tmp_path_factory: pytest.TempPathFactory,
+        skip_image_verification: pytest.FixtureRequest,
+    ) -> None:
         reference = (doc.parent / "reference" / doc.stem).with_suffix(".pdf")
         destination = tmp_path_factory.mktemp(doc.stem).with_suffix(".pdf")
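The skip_image_verification fixture requested here is defined elsewhere in the test suite and is not part of this diff. Presumably it stubs out the signature check so conversion tests can run against an unsigned, locally built image; a hypothetical sketch of such a fixture (the patch target is an assumption):

    # Hypothetical sketch of a skip_image_verification fixture; the real one
    # may live in conftest.py and patch a different target.
    import pytest

    @pytest.fixture
    def skip_image_verification(monkeypatch: pytest.MonkeyPatch) -> None:
        monkeypatch.setattr(
            "dangerzone.isolation_provider.container.verify_local_image",
            lambda *args, **kwargs: True,
        )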
tests/test_registry.py (new file, 238 lines)
@@ -0,0 +1,238 @@
import hashlib

import pytest
import requests
from pytest_mock import MockerFixture

from dangerzone.updater.registry import (
    Image,
    _get_auth_header,
    _url,
    get_manifest,
    get_manifest_digest,
    list_tags,
    parse_image_location,
)


def test_parse_image_location_no_tag():
    """Test that parse_image_location correctly handles an image location without a tag."""
    image_str = "ghcr.io/freedomofpress/dangerzone"
    image = parse_image_location(image_str)

    assert isinstance(image, Image)
    assert image.registry == "ghcr.io"
    assert image.namespace == "freedomofpress"
    assert image.image_name == "dangerzone"
    assert image.tag == "latest"  # Default tag should be "latest"
    assert image.digest is None


def test_parse_image_location_with_tag():
    """Test that parse_image_location correctly handles an image location with a tag."""
    image_str = "ghcr.io/freedomofpress/dangerzone:v0.4.2"
    image = parse_image_location(image_str)

    assert isinstance(image, Image)
    assert image.registry == "ghcr.io"
    assert image.namespace == "freedomofpress"
    assert image.image_name == "dangerzone"
    assert image.tag == "v0.4.2"


def test_parse_image_location_tag_plus_digest():
    """Test that parse_image_location handles an image location with a tag that includes a digest."""
    image_str = (
        "ghcr.io/freedomofpress/dangerzone"
        ":20250205-0.8.0-148-ge67fbc1"
        "@sha256:19e8eacd75879d05f6621c2ea8dd955e68ee3e07b41b9d53f4c8cc9929a68a67"
    )

    image = parse_image_location(image_str)

    assert isinstance(image, Image)
    assert image.registry == "ghcr.io"
    assert image.namespace == "freedomofpress"
    assert image.image_name == "dangerzone"
    assert image.tag == "20250205-0.8.0-148-ge67fbc1"
    assert (
        image.digest
        == "sha256:19e8eacd75879d05f6621c2ea8dd955e68ee3e07b41b9d53f4c8cc9929a68a67"
    )

def test_parse_invalid_image_location():
    """Test that parse_image_location raises an error for invalid image locations."""
    invalid_image_locations = [
        "ghcr.io/dangerzone",  # Missing namespace
        "ghcr.io/freedomofpress/dangerzone:",  # Empty tag
        "freedomofpress/dangerzone",  # Missing registry
        "ghcr.io:freedomofpress/dangerzone",  # Invalid format
        "",  # Empty string
    ]

    for invalid_image in invalid_image_locations:
        with pytest.raises(ValueError, match="Malformed image location"):
            parse_image_location(invalid_image)
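Taken together, the parse tests specify that registry, namespace, and image name are mandatory, that the tag defaults to "latest", and that an optional sha256 digest may trail the tag. A regex-based sketch that satisfies exactly these cases (illustrative only; dangerzone's actual parser may differ):

    # Illustrative parser satisfying the assertions above; the regex and the
    # Image container are assumptions, not dangerzone's actual code.
    import re
    from typing import NamedTuple, Optional

    class Image(NamedTuple):
        registry: str
        namespace: str
        image_name: str
        tag: str
        digest: Optional[str]

    _PATTERN = re.compile(
        r"^(?P<registry>[^/:@]+)/(?P<namespace>[^/:@]+)/(?P<image_name>[^:@]+)"
        r"(?::(?P<tag>[^@]+))?(?:@(?P<digest>sha256:[0-9a-f]{64}))?$"
    )

    def parse_image_location(image_str: str) -> Image:
        match = _PATTERN.match(image_str)
        if not match:
            raise ValueError(f"Malformed image location: {image_str}")
        return Image(
            registry=match.group("registry"),
            namespace=match.group("namespace"),
            image_name=match.group("image_name"),
            tag=match.group("tag") or "latest",
            digest=match.group("digest"),
        )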

def test_list_tags(mocker: MockerFixture):
    """Test that list_tags correctly retrieves tags from the registry."""
    image_str = "ghcr.io/freedomofpress/dangerzone"

    # Mock requests.get to return appropriate values for both calls
    mock_response_auth = mocker.Mock()
    mock_response_auth.json.return_value = {"token": "dummy_token"}
    mock_response_auth.raise_for_status.return_value = None

    mock_response_tags = mocker.Mock()
    mock_response_tags.json.return_value = {
        "tags": ["v0.4.0", "v0.4.1", "v0.4.2", "latest"]
    }
    mock_response_tags.raise_for_status.return_value = None

    # Set up the mock to return a different response for each URL
    def mock_get(url, **kwargs):
        if "token" in url:
            return mock_response_auth
        else:
            return mock_response_tags

    mocker.patch("requests.get", side_effect=mock_get)

    # Call the function
    tags = list_tags(image_str)

    # Verify the result
    assert tags == ["v0.4.0", "v0.4.1", "v0.4.2", "latest"]
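The two mocked requests.get calls mirror the standard OCI distribution flow: first fetch an (anonymous) bearer token, then call the tags endpoint with it. A sketch of that protocol against ghcr.io (the URL shapes follow the standard registry API; dangerzone's helpers _url and _get_auth_header may differ in detail):

    # Token-then-tags flow emulated by the mocks above.
    import requests

    def fetch_tags(registry: str, namespace: str, name: str) -> list:
        repo = f"{namespace}/{name}"
        # Step 1: request an anonymous bearer token with pull scope.
        token_resp = requests.get(
            f"https://{registry}/token",
            params={"scope": f"repository:{repo}:pull"},
        )
        token_resp.raise_for_status()
        token = token_resp.json()["token"]
        # Step 2: list the repository tags using the bearer token.
        tags_resp = requests.get(
            f"https://{registry}/v2/{repo}/tags/list",
            headers={"Authorization": f"Bearer {token}"},
        )
        tags_resp.raise_for_status()
        return tags_resp.json()["tags"]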

def test_list_tags_auth_error(mocker: MockerFixture):
    """Test that list_tags handles authentication errors correctly."""
    image_str = "ghcr.io/freedomofpress/dangerzone"

    # Mock requests.get to raise an HTTPError
    mock_response = mocker.Mock()
    mock_response.raise_for_status.side_effect = requests.exceptions.HTTPError(
        "401 Client Error: Unauthorized"
    )

    mocker.patch("requests.get", return_value=mock_response)

    # Call the function and expect an error
    with pytest.raises(requests.exceptions.HTTPError):
        list_tags(image_str)


def test_list_tags_registry_error(mocker: MockerFixture):
    """Test that list_tags handles registry errors correctly."""
    image_str = "ghcr.io/freedomofpress/dangerzone"

    # Mock requests.get to return success for auth but an error for tags
    mock_response_auth = mocker.Mock()
    mock_response_auth.json.return_value = {"token": "dummy_token"}
    mock_response_auth.raise_for_status.return_value = None

    mock_response_tags = mocker.Mock()
    mock_response_tags.raise_for_status.side_effect = requests.exceptions.HTTPError(
        "404 Client Error: Not Found"
    )

    # Set up the mock to return a different response for each URL
    def mock_get(url, **kwargs):
        if "token" in url:
            return mock_response_auth
        else:
            return mock_response_tags

    mocker.patch("requests.get", side_effect=mock_get)

    # Call the function and expect an error
    with pytest.raises(requests.exceptions.HTTPError):
        list_tags(image_str)

def test_get_manifest(mocker: MockerFixture):
    """Test that get_manifest correctly retrieves manifests from the registry."""
    image_str = "ghcr.io/freedomofpress/dangerzone:v0.4.2"

    # Mock the responses
    manifest_content = {
        "schemaVersion": 2,
        "mediaType": "application/vnd.docker.distribution.manifest.v2+json",
        "config": {
            "mediaType": "application/vnd.docker.container.image.v1+json",
            "size": 1234,
            "digest": "sha256:abc123def456",
        },
        "layers": [
            {
                "mediaType": "application/vnd.docker.image.rootfs.diff.tar.gzip",
                "size": 12345,
                "digest": "sha256:layer1",
            }
        ],
    }

    mock_response_auth = mocker.Mock()
    mock_response_auth.json.return_value = {"token": "dummy_token"}
    mock_response_auth.raise_for_status.return_value = None

    mock_response_manifest = mocker.Mock()
    mock_response_manifest.json.return_value = manifest_content
    mock_response_manifest.status_code = 200
    mock_response_manifest.raise_for_status.return_value = None

    # Set up the mock to return a different response for each URL
    def mock_get(url, **kwargs):
        if "token" in url:
            return mock_response_auth
        else:
            return mock_response_manifest

    mocker.patch("requests.get", side_effect=mock_get)

    # Call the function
    response = get_manifest(image_str)

    # Verify the result
    assert response.status_code == 200
    assert response.json() == manifest_content

def test_get_manifest_digest():
    """Test that get_manifest_digest correctly calculates the manifest digest."""
    # Create a sample manifest content
    manifest_content = b'{"schemaVersion":2,"mediaType":"application/vnd.docker.distribution.manifest.v2+json"}'

    # Calculate the expected digest manually
    expected_digest = hashlib.sha256(manifest_content).hexdigest()

    # Call the function with the content directly
    digest = get_manifest_digest("unused_image_str", manifest_content)

    # Verify the result
    assert digest == expected_digest
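A manifest digest is simply the SHA-256 of the raw manifest bytes; registries report the same value, prefixed with "sha256:", in the Docker-Content-Digest response header, which is why hashing the body locally and asking the registry must agree. A worked example:

    import hashlib

    manifest = b'{"schemaVersion":2}'
    digest = hashlib.sha256(manifest).hexdigest()
    print(f"sha256:{digest}")  # 64 hex characters, as a registry would report it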

def test_get_manifest_digest_from_registry(mocker: MockerFixture):
    """Test that get_manifest_digest correctly retrieves and calculates digests from the registry."""
    image_str = "ghcr.io/freedomofpress/dangerzone:v0.4.2"

    # Sample manifest content
    manifest_content = b'{"schemaVersion":2,"mediaType":"application/vnd.docker.distribution.manifest.v2+json"}'
    expected_digest = hashlib.sha256(manifest_content).hexdigest()

    # Mock get_manifest
    mock_response = mocker.Mock()
    mock_response.content = manifest_content
    mocker.patch("dangerzone.updater.registry.get_manifest", return_value=mock_response)

    # Call the function
    digest = get_manifest_digest(image_str)

    # Verify the result
    assert digest == expected_digest
tests/test_signatures.py (new file, 389 lines)
@@ -0,0 +1,389 @@
import json
from pathlib import Path

import pytest
from pytest_subprocess import FakeProcess

from dangerzone import errors as dzerrors
from dangerzone.updater import errors
from dangerzone.updater.signatures import (
    Signature,
    get_last_log_index,
    get_log_index_from_signatures,
    get_remote_signatures,
    is_update_available,
    load_and_verify_signatures,
    prepare_airgapped_archive,
    store_signatures,
    upgrade_container_image,
    verify_local_image,
    verify_signature,
    verify_signatures,
)

ASSETS_PATH = Path(__file__).parent / "assets"
TEST_PUBKEY_PATH = ASSETS_PATH / "test.pub.key"
INVALID_SIGNATURES_PATH = ASSETS_PATH / "signatures" / "invalid"
VALID_SIGNATURES_PATH = ASSETS_PATH / "signatures" / "valid"
TEMPERED_SIGNATURES_PATH = ASSETS_PATH / "signatures" / "tempered"

RANDOM_DIGEST = "aacc9b586648bbe3040f2822153b1d5ead2779af45ff750fd6f04daf4a9f64b4"


@pytest.fixture
def valid_signature():
    signature_file = next(VALID_SIGNATURES_PATH.glob("**/*.json"))
    with open(signature_file, "r") as signature_file:
        signatures = json.load(signature_file)
        return signatures.pop()


@pytest.fixture
def tempered_signature():
    signature_file = next(TEMPERED_SIGNATURES_PATH.glob("**/*.json"))
    with open(signature_file, "r") as signature_file:
        signatures = json.load(signature_file)
        return signatures.pop()


@pytest.fixture
def signature_other_digest(valid_signature):
    signature = valid_signature.copy()
    signature["Bundle"]["Payload"]["digest"] = "sha256:123456"
    return signature

def test_load_valid_signatures(mocker):
    mocker.patch("dangerzone.updater.signatures.SIGNATURES_PATH", VALID_SIGNATURES_PATH)
    valid_signatures = list(VALID_SIGNATURES_PATH.glob("**/*.json"))
    assert len(valid_signatures) > 0
    for file in valid_signatures:
        signatures = load_and_verify_signatures(file.stem, TEST_PUBKEY_PATH)
        assert isinstance(signatures, list)
        assert len(signatures) > 0


def test_load_invalid_signatures(mocker):
    mocker.patch(
        "dangerzone.updater.signatures.SIGNATURES_PATH", INVALID_SIGNATURES_PATH
    )
    invalid_signatures = list(INVALID_SIGNATURES_PATH.glob("**/*.json"))
    assert len(invalid_signatures) > 0
    for file in invalid_signatures:
        with pytest.raises(errors.SignatureError):
            load_and_verify_signatures(file.stem, TEST_PUBKEY_PATH)


def test_load_tempered_signatures(mocker):
    mocker.patch(
        "dangerzone.updater.signatures.SIGNATURES_PATH", TEMPERED_SIGNATURES_PATH
    )
    tempered_signatures = list(TEMPERED_SIGNATURES_PATH.glob("**/*.json"))
    assert len(tempered_signatures) > 0
    for file in tempered_signatures:
        with pytest.raises(errors.SignatureError):
            load_and_verify_signatures(file.stem, TEST_PUBKEY_PATH)


def test_get_log_index_from_signatures():
    signatures = [{"Bundle": {"Payload": {"logIndex": 1}}}]
    assert get_log_index_from_signatures(signatures) == 1


def test_get_log_index_from_signatures_empty():
    signatures = []
    assert get_log_index_from_signatures(signatures) == 0


def test_get_log_index_from_malformed_signatures():
    signatures = [{"Bundle": {"Payload": {"logIndex": "foo"}}}]
    assert get_log_index_from_signatures(signatures) == 0


def test_get_log_index_from_missing_log_index():
    signatures = [{"Bundle": {"Payload": {}}}]
    assert get_log_index_from_signatures(signatures) == 0
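The four log-index tests imply a deliberately forgiving accessor: a missing or malformed logIndex degrades to 0 instead of raising. A sketch that satisfies all four (illustrative, not dangerzone's actual implementation):

    # Returns the highest well-formed logIndex across signatures, else 0.
    from typing import Any, Dict, List

    def get_log_index_from_signatures(signatures: List[Dict[str, Any]]) -> int:
        index = 0
        for signature in signatures:
            candidate = (
                signature.get("Bundle", {}).get("Payload", {}).get("logIndex", 0)
            )
            if isinstance(candidate, int) and candidate > index:
                index = candidate
        return index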

def test_upgrade_container_image_if_already_up_to_date(mocker):
    mocker.patch(
        "dangerzone.updater.registry.is_new_remote_image_available",
        return_value=(False, None),
    )
    with pytest.raises(errors.ImageAlreadyUpToDate):
        upgrade_container_image(
            "ghcr.io/freedomofpress/dangerzone/dangerzone", "sha256:123456", "test.pub"
        )


def test_upgrade_container_without_signatures(mocker):
    mocker.patch(
        "dangerzone.updater.registry.is_new_remote_image_available",
        return_value=(True, "sha256:123456"),
    )
    mocker.patch("dangerzone.updater.signatures.get_remote_signatures", return_value=[])
    with pytest.raises(errors.SignatureVerificationError):
        upgrade_container_image(
            "ghcr.io/freedomofpress/dangerzone/dangerzone",
            "sha256:123456",
            "test.pub",
        )


def test_upgrade_container_lower_log_index(mocker):
    image_digest = "4da441235e84e93518778827a5c5745d532d7a4079886e1647924bee7ef1c14d"
    signatures = load_and_verify_signatures(
        image_digest,
        TEST_PUBKEY_PATH,
        bypass_verification=True,
        signatures_path=VALID_SIGNATURES_PATH,
    )
    mocker.patch(
        "dangerzone.updater.registry.is_new_remote_image_available",
        return_value=(
            True,
            image_digest,
        ),
    )
    mocker.patch(
        "dangerzone.updater.signatures.get_remote_signatures",
        return_value=signatures,
    )
    # Mock container_pull to avoid losing time on test failures
    mocker.patch("dangerzone.container_utils.container_pull")
    # The log index of the incoming signatures is 168652066
    mocker.patch(
        "dangerzone.updater.signatures.get_last_log_index",
        return_value=168652067,
    )

    with pytest.raises(errors.InvalidLogIndex):
        upgrade_container_image(
            "ghcr.io/freedomofpress/dangerzone/dangerzone",
            image_digest,
            TEST_PUBKEY_PATH,
        )
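The lower-log-index test encodes rollback protection: entries in the Rekor transparency log receive monotonically increasing indices, so an "update" whose signatures point at an older log entry than the last accepted one is rejected. The guard reduces to a comparison like this sketch (assumed shape):

    class InvalidLogIndex(Exception): ...

    def check_log_index(incoming: int, last_accepted: int) -> None:
        # Reject signatures older than what we have already accepted.
        if incoming < last_accepted:
            raise InvalidLogIndex(
                f"Log index {incoming} is lower than the last accepted "
                f"log index {last_accepted}"
            )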

def test_prepare_airgapped_archive_requires_digest():
    with pytest.raises(errors.AirgappedImageDownloadError):
        prepare_airgapped_archive(
            "ghcr.io/freedomofpress/dangerzone/dangerzone", "test.tar"
        )


def test_get_remote_signatures_error(fp: FakeProcess, mocker):
    image = "ghcr.io/freedomofpress/dangerzone/dangerzone"
    digest = "123456"
    mocker.patch("dangerzone.updater.cosign.ensure_installed", return_value=True)
    fp.register_subprocess(
        ["cosign", "download", "signature", f"{image}@sha256:{digest}"], returncode=1
    )
    with pytest.raises(errors.NoRemoteSignatures):
        get_remote_signatures(image, digest)


def test_get_remote_signatures_empty(fp: FakeProcess, mocker):
    image = "ghcr.io/freedomofpress/dangerzone/dangerzone"
    digest = "123456"
    mocker.patch("dangerzone.updater.cosign.ensure_installed", return_value=True)
    fp.register_subprocess(
        ["cosign", "download", "signature", f"{image}@sha256:{digest}"],
        stdout=json.dumps({}),
    )
    with pytest.raises(errors.NoRemoteSignatures):
        get_remote_signatures(image, digest)


def test_get_remote_signatures_cosign_error(mocker, fp: FakeProcess):
    image = "ghcr.io/freedomofpress/dangerzone/dangerzone"
    digest = "123456"
    mocker.patch("dangerzone.updater.cosign.ensure_installed", return_value=True)
    fp.register_subprocess(
        ["cosign", "download", "signature", f"{image}@sha256:{digest}"],
        returncode=1,
        stderr="Error: no signatures associated",
    )
    with pytest.raises(errors.NoRemoteSignatures):
        get_remote_signatures(image, digest)

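All three tests fake the same "cosign download signature <image>@sha256:<digest>" invocation, a real cosign subcommand that prints one JSON signature per line on success. A sketch of a wrapper with the error behavior the tests expect (an assumed shape for dangerzone's helper):

    import json
    import subprocess
    from typing import Any, List

    class NoRemoteSignatures(Exception): ...

    def download_signatures(image: str, digest: str) -> List[Any]:
        result = subprocess.run(
            ["cosign", "download", "signature", f"{image}@sha256:{digest}"],
            capture_output=True,
            text=True,
        )
        if result.returncode != 0:
            raise NoRemoteSignatures(result.stderr.strip())
        parsed = [
            json.loads(line) for line in result.stdout.splitlines() if line.strip()
        ]
        signatures = [s for s in parsed if s]  # drop empty objects like {}
        if not signatures:
            raise NoRemoteSignatures("No signatures found for this image")
        return signatures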

def test_store_signatures_with_different_digests(
    valid_signature, signature_other_digest, mocker, tmp_path
):
    """Test that store_signatures raises an error when a signature's digest doesn't match."""
    signatures = [valid_signature, signature_other_digest]
    image_digest = "sha256:123456"

    # Mock the signatures path
    signatures_path = tmp_path / "signatures"
    signatures_path.mkdir()
    mocker.patch("dangerzone.updater.signatures.SIGNATURES_PATH", signatures_path)

    # Mock get_log_index_from_signatures
    mocker.patch(
        "dangerzone.updater.signatures.get_log_index_from_signatures",
        return_value=100,
    )

    # Mock get_last_log_index
    mocker.patch(
        "dangerzone.updater.signatures.get_last_log_index",
        return_value=50,
    )

    # Call store_signatures
    with pytest.raises(errors.SignatureMismatch):
        store_signatures(signatures, image_digest, TEST_PUBKEY_PATH)

    # Verify that the signatures file was not created
    assert not (signatures_path / f"{image_digest}.json").exists()

    # Verify that the log index file was not created
    assert not (signatures_path / "last_log_index").exists()


def test_stores_signatures_updates_last_log_index(valid_signature, mocker, tmp_path):
    """Test that store_signatures updates the last log index file."""
    signatures = [valid_signature]
    # Extract the digest from the signature
    image_digest = Signature(valid_signature).manifest_digest

    # Mock the signatures path
    signatures_path = tmp_path / "signatures"
    signatures_path.mkdir()
    mocker.patch("dangerzone.updater.signatures.SIGNATURES_PATH", signatures_path)

    # Create an existing last_log_index file with a lower value
    with open(signatures_path / "last_log_index", "w") as f:
        f.write("50")

    # Mock get_log_index_from_signatures to return a higher value
    mocker.patch(
        "dangerzone.updater.signatures.get_log_index_from_signatures",
        return_value=100,
    )

    # Call store_signatures
    store_signatures(signatures, image_digest, TEST_PUBKEY_PATH)

    # Verify that the log index file was updated
    assert (signatures_path / "last_log_index").exists()
    with open(signatures_path / "last_log_index", "r") as f:
        assert f.read() == "100"
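The two store tests pin down an all-or-nothing write: if any signature belongs to a different image digest, nothing is persisted, and last_log_index only ever moves forward. A sketch under those assumptions (the field access mirrors the fixtures above; the real code may decode signature payloads differently):

    import json
    from pathlib import Path
    from typing import Any, Dict, List

    class SignatureMismatch(Exception): ...

    def store_signatures_sketch(
        signatures: List[Dict[str, Any]], image_digest: str, signatures_path: Path
    ) -> None:
        # Refuse to write anything if one signature is for a different image.
        for signature in signatures:
            if signature["Bundle"]["Payload"]["digest"] != image_digest:
                raise SignatureMismatch(f"Signature does not match {image_digest}")
        (signatures_path / f"{image_digest}.json").write_text(json.dumps(signatures))
        # last_log_index is monotonic: only overwrite with a higher value.
        index_file = signatures_path / "last_log_index"
        previous = int(index_file.read_text()) if index_file.exists() else 0
        incoming = max(
            (s["Bundle"]["Payload"].get("logIndex", 0) for s in signatures), default=0
        )
        if incoming > previous:
            index_file.write_text(str(incoming))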

def test_is_update_available_when_remote_image_available(mocker):
    """
    Test that is_update_available returns True when a new image is available
    and all checks pass
    """
    # Mock is_new_remote_image_available to return True and a digest
    mocker.patch(
        "dangerzone.updater.registry.is_new_remote_image_available",
        return_value=(True, RANDOM_DIGEST),
    )

    # Mock check_signatures_and_logindex to not raise any exceptions
    mocker.patch(
        "dangerzone.updater.signatures.check_signatures_and_logindex",
        return_value=[{"some": "signature"}],
    )

    # Call is_update_available
    update_available, digest = is_update_available(
        "ghcr.io/freedomofpress/dangerzone", "test.pub"
    )

    # Verify the result
    assert update_available is True
    assert digest == RANDOM_DIGEST


def test_is_update_available_when_no_remote_image(mocker):
    """
    Test that is_update_available returns False when no remote image is available
    """
    # Mock is_new_remote_image_available to return False
    mocker.patch(
        "dangerzone.updater.registry.is_new_remote_image_available",
        return_value=(False, None),
    )

    # Call is_update_available
    update_available, digest = is_update_available(
        "ghcr.io/freedomofpress/dangerzone", "test.pub"
    )

    # Verify the result
    assert update_available is False
    assert digest is None


def test_is_update_available_with_invalid_log_index(mocker):
    """
    Test that is_update_available returns False when the log index is invalid
    """
    # Mock is_new_remote_image_available to return True
    mocker.patch(
        "dangerzone.updater.registry.is_new_remote_image_available",
        return_value=(True, RANDOM_DIGEST),
    )

    # Mock check_signatures_and_logindex to raise InvalidLogIndex
    mocker.patch(
        "dangerzone.updater.signatures.check_signatures_and_logindex",
        side_effect=errors.InvalidLogIndex("Invalid log index"),
    )

    # Call is_update_available
    update_available, digest = is_update_available(
        "ghcr.io/freedomofpress/dangerzone", "test.pub"
    )

    # Verify the result
    assert update_available is False
    assert digest is None
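The three tests above reduce is_update_available to a small decision tree: no new remote digest yields (False, None), a signature or log-index failure also degrades to (False, None), and only a verified new digest yields (True, digest). As a self-contained sketch (collaborators injected for illustration):

    from typing import Callable, List, Optional, Tuple

    class InvalidLogIndex(Exception): ...

    def is_update_available_sketch(
        is_new_remote_image_available: Callable[[], Tuple[bool, Optional[str]]],
        check_signatures_and_logindex: Callable[[str], List],
    ) -> Tuple[bool, Optional[str]]:
        remote_available, digest = is_new_remote_image_available()
        if not remote_available or digest is None:
            return (False, None)
        try:
            check_signatures_and_logindex(digest)
        except InvalidLogIndex:
            return (False, None)
        return (True, digest)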

def test_verify_signature(valid_signature):
    """Test that verify_signature succeeds for a valid signature."""
    verify_signature(
        valid_signature,
        Signature(valid_signature).manifest_digest,
        TEST_PUBKEY_PATH,
    )


def test_verify_signature_tempered(tempered_signature):
    """Test that verify_signature raises an error when the signature has been tampered with."""
    # Call verify_signature and expect an error
    with pytest.raises(errors.SignatureError):
        verify_signature(
            tempered_signature,
            Signature(tempered_signature).manifest_digest,
            TEST_PUBKEY_PATH,
        )


def test_verify_signatures_empty_list():
    with pytest.raises(errors.SignatureVerificationError):
        verify_signatures([], "1234", TEST_PUBKEY_PATH)


def test_verify_signatures_not_0():
    pass