mirror of
https://github.com/freedomofpress/dangerzone.git
synced 2025-05-18 19:20:35 +02:00
Merge e388fe6090
into b2f4e2d523
This commit is contained in:
commit
82c94fe4ef
26 changed files with 1177 additions and 269 deletions
2
.github/workflows/build.yml
vendored
2
.github/workflows/build.yml
vendored
|
@ -85,7 +85,7 @@ jobs:
|
||||||
id: cache-container-image
|
id: cache-container-image
|
||||||
uses: actions/cache@v4
|
uses: actions/cache@v4
|
||||||
with:
|
with:
|
||||||
key: v3-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/common.py', 'dangerzone/conversion/doc_to_pixels.py', 'dangerzone/conversion/pixels_to_pdf.py', 'poetry.lock', 'gvisor_wrapper/entrypoint.py') }}
|
key: v4-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }}
|
||||||
path: |
|
path: |
|
||||||
share/container.tar.gz
|
share/container.tar.gz
|
||||||
share/image-id.txt
|
share/image-id.txt
|
||||||
|
|
36
.github/workflows/ci.yml
vendored
36
.github/workflows/ci.yml
vendored
|
@ -59,7 +59,7 @@ jobs:
|
||||||
id: cache-container-image
|
id: cache-container-image
|
||||||
uses: actions/cache@v4
|
uses: actions/cache@v4
|
||||||
with:
|
with:
|
||||||
key: v3-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/common.py', 'dangerzone/conversion/doc_to_pixels.py', 'dangerzone/conversion/pixels_to_pdf.py', 'poetry.lock', 'gvisor_wrapper/entrypoint.py') }}
|
key: v4-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }}
|
||||||
path: |-
|
path: |-
|
||||||
share/container.tar.gz
|
share/container.tar.gz
|
||||||
share/image-id.txt
|
share/image-id.txt
|
||||||
|
@ -67,7 +67,6 @@ jobs:
|
||||||
- name: Build Dangerzone container image
|
- name: Build Dangerzone container image
|
||||||
if: ${{ steps.cache-container-image.outputs.cache-hit != 'true' }}
|
if: ${{ steps.cache-container-image.outputs.cache-hit != 'true' }}
|
||||||
run: |
|
run: |
|
||||||
sudo apt-get install -y python3-poetry
|
|
||||||
python3 ./install/common/build-image.py
|
python3 ./install/common/build-image.py
|
||||||
|
|
||||||
- name: Upload container image
|
- name: Upload container image
|
||||||
|
@ -227,7 +226,7 @@ jobs:
|
||||||
- name: Restore container cache
|
- name: Restore container cache
|
||||||
uses: actions/cache/restore@v4
|
uses: actions/cache/restore@v4
|
||||||
with:
|
with:
|
||||||
key: v3-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/common.py', 'dangerzone/conversion/doc_to_pixels.py', 'dangerzone/conversion/pixels_to_pdf.py', 'poetry.lock', 'gvisor_wrapper/entrypoint.py') }}
|
key: v4-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }}
|
||||||
path: |-
|
path: |-
|
||||||
share/container.tar.gz
|
share/container.tar.gz
|
||||||
share/image-id.txt
|
share/image-id.txt
|
||||||
|
@ -334,7 +333,7 @@ jobs:
|
||||||
- name: Restore container image
|
- name: Restore container image
|
||||||
uses: actions/cache/restore@v4
|
uses: actions/cache/restore@v4
|
||||||
with:
|
with:
|
||||||
key: v3-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/common.py', 'dangerzone/conversion/doc_to_pixels.py', 'dangerzone/conversion/pixels_to_pdf.py', 'poetry.lock', 'gvisor_wrapper/entrypoint.py') }}
|
key: v4-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }}
|
||||||
path: |-
|
path: |-
|
||||||
share/container.tar.gz
|
share/container.tar.gz
|
||||||
share/image-id.txt
|
share/image-id.txt
|
||||||
|
@ -429,7 +428,7 @@ jobs:
|
||||||
- name: Restore container image
|
- name: Restore container image
|
||||||
uses: actions/cache/restore@v4
|
uses: actions/cache/restore@v4
|
||||||
with:
|
with:
|
||||||
key: v3-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/common.py', 'dangerzone/conversion/doc_to_pixels.py', 'dangerzone/conversion/pixels_to_pdf.py', 'poetry.lock', 'gvisor_wrapper/entrypoint.py') }}
|
key: v4-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }}
|
||||||
path: |-
|
path: |-
|
||||||
share/container.tar.gz
|
share/container.tar.gz
|
||||||
share/image-id.txt
|
share/image-id.txt
|
||||||
|
@ -472,3 +471,30 @@ jobs:
|
||||||
# file successfully.
|
# file successfully.
|
||||||
xvfb-run -s '-ac' ./dev_scripts/env.py --distro ${{ matrix.distro }} --version ${{ matrix.version }} run --dev \
|
xvfb-run -s '-ac' ./dev_scripts/env.py --distro ${{ matrix.distro }} --version ${{ matrix.version }} run --dev \
|
||||||
bash -c 'cd dangerzone; poetry run make test'
|
bash -c 'cd dangerzone; poetry run make test'
|
||||||
|
|
||||||
|
check-reproducibility:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Install dev. dependencies
|
||||||
|
run: |-
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install -y git python3-poetry --no-install-recommends
|
||||||
|
poetry install --only package
|
||||||
|
|
||||||
|
- name: Verify that the Dockerfile matches the commited template and params
|
||||||
|
run: |-
|
||||||
|
cp Dockerfile Dockerfile.orig
|
||||||
|
make Dockerfile
|
||||||
|
diff Dockerfile.orig Dockerfile
|
||||||
|
|
||||||
|
- name: Build Dangerzone container image
|
||||||
|
run: |
|
||||||
|
python3 ./install/common/build-image.py --no-save
|
||||||
|
|
||||||
|
- name: Reproduce the same container image
|
||||||
|
run: |
|
||||||
|
./dev_scripts/reproduce-image.py
|
||||||
|
|
10
.github/workflows/scan.yml
vendored
10
.github/workflows/scan.yml
vendored
|
@ -21,13 +21,17 @@ jobs:
|
||||||
sudo apt install pipx
|
sudo apt install pipx
|
||||||
pipx install poetry
|
pipx install poetry
|
||||||
pipx inject poetry poetry-plugin-export
|
pipx inject poetry poetry-plugin-export
|
||||||
|
poetry install --only package
|
||||||
|
- name: Bump date of Debian snapshot archive
|
||||||
|
run: |
|
||||||
|
date=$(date "+%Y%m%d")
|
||||||
|
sed -i "s/DEBIAN_ARCHIVE_DATE=[0-9]\+/DEBIAN_ARCHIVE_DATE=${date}/" Dockerfile.env
|
||||||
|
make Dockerfile
|
||||||
- name: Build container image
|
- name: Build container image
|
||||||
run: python3 ./install/common/build-image.py --runtime docker --no-save
|
run: python3 ./install/common/build-image.py --runtime docker --no-save
|
||||||
- name: Get image tag
|
- name: Get image tag
|
||||||
id: tag
|
id: tag
|
||||||
run: |
|
run: echo "tag=$(cat share/image-id.txt)" >> $GITHUB_OUTPUT
|
||||||
tag=$(docker images dangerzone.rocks/dangerzone --format '{{ .Tag }}')
|
|
||||||
echo "tag=$tag" >> $GITHUB_OUTPUT
|
|
||||||
# NOTE: Scan first without failing, else we won't be able to read the scan
|
# NOTE: Scan first without failing, else we won't be able to read the scan
|
||||||
# report.
|
# report.
|
||||||
- name: Scan container image (no fail)
|
- name: Scan container image (no fail)
|
||||||
|
|
38
.grype.yaml
38
.grype.yaml
|
@ -2,10 +2,38 @@
|
||||||
# latest release of Dangerzone, and offer our analysis.
|
# latest release of Dangerzone, and offer our analysis.
|
||||||
|
|
||||||
ignore:
|
ignore:
|
||||||
# CVE-2024-11053
|
# CVE-2023-45853
|
||||||
# ==============
|
# ==============
|
||||||
#
|
#
|
||||||
# NVD Entry: https://nvd.nist.gov/vuln/detail/CVE-2024-11053
|
# Debian tracker: https://security-tracker.debian.org/tracker/CVE-2023-45853
|
||||||
# Verdict: Dangerzone is not affected because libcurl is an HTTP client, and
|
# Verdict: Dangerzone is not affected because the zlib library in Debian is
|
||||||
# the Dangerzone container does not make any network calls.
|
# built in a way that is not vulnerable.
|
||||||
- vulnerability: CVE-2024-11053
|
- vulnerability: CVE-2023-45853
|
||||||
|
# CVE-2024-38428
|
||||||
|
# ==============
|
||||||
|
#
|
||||||
|
# Debian tracker: https://security-tracker.debian.org/tracker/CVE-2024-38428
|
||||||
|
# Verdict: Dangerzone is not affected because it doesn't use wget in the
|
||||||
|
# container image (which also has no network connectivity).
|
||||||
|
- vulnerability: CVE-2024-38428
|
||||||
|
# CVE-2024-57823
|
||||||
|
# ==============
|
||||||
|
#
|
||||||
|
# Debian tracker: https://security-tracker.debian.org/tracker/CVE-2024-57823
|
||||||
|
# Verdict: Dangerzone is not affected. First things first, LibreOffice is
|
||||||
|
# using this library for parsing RDF metadata in a document [1], and has
|
||||||
|
# issued a fix for the vendored raptor2 package they have for other distros
|
||||||
|
# [2].
|
||||||
|
#
|
||||||
|
# On the other hand, the Debian security team has stated that this is a minor
|
||||||
|
# issue [3], and there's no fix from the developers yet. It seems that the
|
||||||
|
# Debian package is not affected somehow by this CVE, probably due to the way
|
||||||
|
# it's packaged.
|
||||||
|
#
|
||||||
|
# [1] https://wiki.documentfoundation.org/Documentation/DevGuide/Office_Development#RDF_metadata
|
||||||
|
# [2] https://cgit.freedesktop.org/libreoffice/core/commit/?id=2b50dc0e4482ac0ad27d69147b4175e05af4fba4
|
||||||
|
# [2] From https://security-tracker.debian.org/tracker/CVE-2024-57823:
|
||||||
|
#
|
||||||
|
# [bookworm] - raptor2 <postponed> (Minor issue, revisit when fixed upstream)
|
||||||
|
#
|
||||||
|
- vulnerability: CVE-2024-57823
|
||||||
|
|
6
BUILD.md
6
BUILD.md
|
@ -515,3 +515,9 @@ poetry run .\install\windows\build-app.bat
|
||||||
```
|
```
|
||||||
|
|
||||||
When you're done you will have `dist\Dangerzone.msi`.
|
When you're done you will have `dist\Dangerzone.msi`.
|
||||||
|
|
||||||
|
## Updating the container image
|
||||||
|
|
||||||
|
The Dangezone container image is reproducible. This means that every time we
|
||||||
|
build it, the result will be bit-for-bit the same, with some minor exceptions.
|
||||||
|
Read more on how you can update it in `docs/developer/reproducibility.md`.
|
||||||
|
|
273
Dockerfile
273
Dockerfile
|
@ -1,102 +1,211 @@
|
||||||
###########################################
|
# NOTE: Updating the packages to their latest versions requires bumping the
|
||||||
# Build PyMuPDF
|
# Dockerfile args below. For more info about this file, read
|
||||||
|
# docs/developer/reproducibility.md.
|
||||||
|
|
||||||
FROM alpine:latest as pymupdf-build
|
ARG DEBIAN_IMAGE_DATE=20250113
|
||||||
ARG ARCH
|
|
||||||
ARG REQUIREMENTS_TXT
|
|
||||||
|
|
||||||
# Install PyMuPDF via hash-checked requirements file
|
FROM debian:bookworm-${DEBIAN_IMAGE_DATE}-slim as dangerzone-image
|
||||||
COPY ${REQUIREMENTS_TXT} /tmp/requirements.txt
|
|
||||||
|
|
||||||
# PyMuPDF provides non-arm musl wheels only.
|
ARG GVISOR_ARCHIVE_DATE=20250120
|
||||||
# Only install build-dependencies if we are actually building the wheel
|
ARG DEBIAN_ARCHIVE_DATE=20250127
|
||||||
RUN case "$ARCH" in \
|
ARG H2ORESTART_CHECKSUM=7760dc2963332c50d15eee285933ec4b48d6a1de9e0c0f6082946f93090bd132
|
||||||
"arm64") \
|
ARG H2ORESTART_VERSION=v0.7.0
|
||||||
# This is required for copying later, but is created only in the pre-built wheels
|
|
||||||
mkdir -p /usr/lib/python3.12/site-packages/PyMuPDF.libs/ \
|
|
||||||
&& apk --no-cache add linux-headers g++ linux-headers gcc make python3-dev py3-pip clang-dev ;; \
|
|
||||||
*) \
|
|
||||||
apk --no-cache add py3-pip ;; \
|
|
||||||
esac
|
|
||||||
RUN pip install -vv --break-system-packages --require-hashes -r /tmp/requirements.txt
|
|
||||||
|
|
||||||
|
ENV DEBIAN_FRONTEND=noninteractive
|
||||||
|
|
||||||
###########################################
|
# The following way of installing packages is taken from
|
||||||
# Download H2ORestart
|
# https://github.com/reproducible-containers/repro-sources-list.sh/blob/master/Dockerfile.debian-12,
|
||||||
FROM alpine:latest as h2orestart-dl
|
# and adapted to allow installing gVisor from each own repo as well.
|
||||||
ARG H2ORESTART_CHECKSUM=d09bc5c93fe2483a7e4a57985d2a8d0e4efae2efb04375fe4b59a68afd7241e2
|
RUN \
|
||||||
RUN mkdir /libreoffice_ext && cd libreoffice_ext \
|
--mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||||
|
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||||
|
--mount=type=bind,source=./container_helpers/repro-sources-list.sh,target=/usr/local/bin/repro-sources-list.sh \
|
||||||
|
--mount=type=bind,source=./container_helpers/gvisor.key,target=/tmp/gvisor.key \
|
||||||
|
: "Hacky way to set a date for the Debian snapshot repos" && \
|
||||||
|
touch -d ${DEBIAN_ARCHIVE_DATE} /etc/apt/sources.list.d/debian.sources && \
|
||||||
|
touch -d ${DEBIAN_ARCHIVE_DATE} /etc/apt/sources.list && \
|
||||||
|
repro-sources-list.sh && \
|
||||||
|
: "Setup APT to install gVisor from its separate APT repo" && \
|
||||||
|
apt-get update && \
|
||||||
|
apt-get upgrade -y && \
|
||||||
|
apt-get install -y --no-install-recommends apt-transport-https ca-certificates gnupg && \
|
||||||
|
gpg -o /usr/share/keyrings/gvisor-archive-keyring.gpg --dearmor /tmp/gvisor.key && \
|
||||||
|
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/gvisor-archive-keyring.gpg] https://storage.googleapis.com/gvisor/releases ${GVISOR_ARCHIVE_DATE} main" > /etc/apt/sources.list.d/gvisor.list && \
|
||||||
|
: "Install the necessary gVisor and Dangerzone dependencies" && \
|
||||||
|
apt-get update && \
|
||||||
|
apt-get install -y --no-install-recommends \
|
||||||
|
python3 python3-fitz libreoffice-nogui libreoffice-java-common \
|
||||||
|
python3 python3-magic default-jre-headless fonts-noto-cjk fonts-dejavu \
|
||||||
|
runsc unzip wget && \
|
||||||
|
: "Clean up for improving reproducibility (optional)" && \
|
||||||
|
rm -rf /var/cache/fontconfig/ && \
|
||||||
|
rm -rf /etc/ssl/certs/java/cacerts && \
|
||||||
|
rm -rf /var/log/* /var/cache/ldconfig/aux-cache
|
||||||
|
|
||||||
|
# Download H2ORestart from GitHub using a pinned version and hash. Note that
|
||||||
|
# it's available in Debian repos, but not in Bookworm yet.
|
||||||
|
RUN mkdir /opt/libreoffice_ext && cd /opt/libreoffice_ext \
|
||||||
&& H2ORESTART_FILENAME=h2orestart.oxt \
|
&& H2ORESTART_FILENAME=h2orestart.oxt \
|
||||||
&& H2ORESTART_VERSION="v0.6.6" \
|
|
||||||
&& wget https://github.com/ebandal/H2Orestart/releases/download/$H2ORESTART_VERSION/$H2ORESTART_FILENAME \
|
&& wget https://github.com/ebandal/H2Orestart/releases/download/$H2ORESTART_VERSION/$H2ORESTART_FILENAME \
|
||||||
&& echo "$H2ORESTART_CHECKSUM $H2ORESTART_FILENAME" | sha256sum -c \
|
&& echo "$H2ORESTART_CHECKSUM $H2ORESTART_FILENAME" | sha256sum -c \
|
||||||
&& install -dm777 "/usr/lib/libreoffice/share/extensions/"
|
&& install -dm777 "/usr/lib/libreoffice/share/extensions/" \
|
||||||
|
&& rm /root/.wget-hsts
|
||||||
|
|
||||||
|
# Create an unprivileged user both for gVisor and for running Dangerzone.
|
||||||
|
RUN addgroup --gid 1000 dangerzone
|
||||||
|
RUN adduser --uid 1000 --ingroup dangerzone --shell /bin/true \
|
||||||
|
--disabled-password --home /home/dangerzone dangerzone
|
||||||
|
|
||||||
###########################################
|
# Copy Dangerzone's conversion logic under /opt/dangerzone, and allow Python to
|
||||||
# Dangerzone image
|
# import it.
|
||||||
|
|
||||||
FROM alpine:latest AS dangerzone-image
|
|
||||||
|
|
||||||
# Install dependencies
|
|
||||||
RUN apk --no-cache -U upgrade && \
|
|
||||||
apk --no-cache add \
|
|
||||||
libreoffice \
|
|
||||||
openjdk8 \
|
|
||||||
python3 \
|
|
||||||
py3-magic \
|
|
||||||
font-noto-cjk
|
|
||||||
|
|
||||||
COPY --from=pymupdf-build /usr/lib/python3.12/site-packages/fitz/ /usr/lib/python3.12/site-packages/fitz
|
|
||||||
COPY --from=pymupdf-build /usr/lib/python3.12/site-packages/pymupdf/ /usr/lib/python3.12/site-packages/pymupdf
|
|
||||||
COPY --from=pymupdf-build /usr/lib/python3.12/site-packages/PyMuPDF.libs/ /usr/lib/python3.12/site-packages/PyMuPDF.libs
|
|
||||||
COPY --from=h2orestart-dl /libreoffice_ext/ /libreoffice_ext
|
|
||||||
|
|
||||||
RUN install -dm777 "/usr/lib/libreoffice/share/extensions/"
|
|
||||||
|
|
||||||
RUN mkdir -p /opt/dangerzone/dangerzone
|
RUN mkdir -p /opt/dangerzone/dangerzone
|
||||||
RUN touch /opt/dangerzone/dangerzone/__init__.py
|
RUN touch /opt/dangerzone/dangerzone/__init__.py
|
||||||
COPY conversion /opt/dangerzone/dangerzone/conversion
|
|
||||||
|
|
||||||
# Add the unprivileged user. Set the UID/GID of the dangerzone user/group to
|
# Copy only the Python code, and not any produced .pyc files.
|
||||||
# 1000, since we will point to it from the OCI config.
|
COPY conversion/*.py /opt/dangerzone/dangerzone/conversion/
|
||||||
#
|
|
||||||
# NOTE: A tmpfs will be mounted over /home/dangerzone directory,
|
|
||||||
# so nothing within it from the image will be persisted.
|
|
||||||
RUN addgroup -g 1000 dangerzone && \
|
|
||||||
adduser -u 1000 -s /bin/true -G dangerzone -h /home/dangerzone -D dangerzone
|
|
||||||
|
|
||||||
###########################################
|
|
||||||
# gVisor wrapper image
|
|
||||||
|
|
||||||
FROM alpine:latest
|
|
||||||
|
|
||||||
RUN apk --no-cache -U upgrade && \
|
|
||||||
apk --no-cache add python3
|
|
||||||
|
|
||||||
RUN GVISOR_URL="https://storage.googleapis.com/gvisor/releases/release/latest/$(uname -m)"; \
|
|
||||||
wget "${GVISOR_URL}/runsc" "${GVISOR_URL}/runsc.sha512" && \
|
|
||||||
sha512sum -c runsc.sha512 && \
|
|
||||||
rm -f runsc.sha512 && \
|
|
||||||
chmod 555 runsc && \
|
|
||||||
mv runsc /usr/bin/
|
|
||||||
|
|
||||||
# Add the unprivileged `dangerzone` user.
|
|
||||||
RUN addgroup dangerzone && \
|
|
||||||
adduser -s /bin/true -G dangerzone -h /home/dangerzone -D dangerzone
|
|
||||||
|
|
||||||
# Switch to the dangerzone user for the rest of the script.
|
|
||||||
USER dangerzone
|
|
||||||
|
|
||||||
# Copy the Dangerzone image, as created by the previous steps, into the home
|
|
||||||
# directory of the `dangerzone` user.
|
|
||||||
RUN mkdir /home/dangerzone/dangerzone-image
|
|
||||||
COPY --from=dangerzone-image / /home/dangerzone/dangerzone-image/rootfs
|
|
||||||
|
|
||||||
# Create a directory that will be used by gVisor as the place where it will
|
# Create a directory that will be used by gVisor as the place where it will
|
||||||
# store the state of its containers.
|
# store the state of its containers.
|
||||||
RUN mkdir /home/dangerzone/.containers
|
RUN mkdir /home/dangerzone/.containers
|
||||||
|
|
||||||
COPY gvisor_wrapper/entrypoint.py /
|
###############################################################################
|
||||||
|
#
|
||||||
|
# REUSING CONTAINER IMAGES:
|
||||||
|
# Anatomy of a hack
|
||||||
|
# ========================
|
||||||
|
#
|
||||||
|
# The rest of the Dockerfile aims to do one thing: allow the final container
|
||||||
|
# image to actually contain two container images; one for the outer container
|
||||||
|
# (spawned by Podman/Docker Desktop), and one for the inner container (spawned
|
||||||
|
# by gVisor).
|
||||||
|
#
|
||||||
|
# This has already been done in the past, and we explain why and how in the
|
||||||
|
# design document for gVisor integration (should be in
|
||||||
|
# `docs/developer/gvisor.md`). In this iteration, we want to also
|
||||||
|
# achieve the following:
|
||||||
|
#
|
||||||
|
# 1. Have a small final image, by sharing some system paths between the inner
|
||||||
|
# and outer container image using symlinks.
|
||||||
|
# 2. Allow our security scanning tool to see the contents of the inner
|
||||||
|
# container image.
|
||||||
|
# 3. Make the outer container image operational, in the sense that you can use
|
||||||
|
# `apt` commands and perform a conversion with Dangerzone, outside the
|
||||||
|
# gVisor sandbox. This is helpful for debugging purposes.
|
||||||
|
#
|
||||||
|
# Below we'll explain how our design choices are informed by the above
|
||||||
|
# sub-goals.
|
||||||
|
#
|
||||||
|
# First, to achieve a small container image, we basically need to copy `/etc`,
|
||||||
|
# `/usr` and `/opt` from the original Dangerzone image to the **inner**
|
||||||
|
# container image (under `/home/dangerzone/dangerzone-image/rootfs/`)
|
||||||
|
#
|
||||||
|
# That's all we need. The rest of the files play no role, and we can actually
|
||||||
|
# mask them in gVisor's OCI config.
|
||||||
|
#
|
||||||
|
# Second, in order to let our security scanner find the installed packages,
|
||||||
|
# we need to copy the following dirs to the root of the **outer** container
|
||||||
|
# image:
|
||||||
|
# * `/etc`, so that the security scanner can detect the image type and its
|
||||||
|
# sources
|
||||||
|
# * `/var`, so that the security scanner can have access to the APT database.
|
||||||
|
#
|
||||||
|
# IMPORTANT: We don't symlink the `/etc` of the **outer** container image to
|
||||||
|
# the **inner** one, in order to avoid leaking files like
|
||||||
|
# `/etc/{hostname,hosts,resolv.conf}` that Podman/Docker mounts when running
|
||||||
|
# the **outer** container image.
|
||||||
|
#
|
||||||
|
# Third, in order to have an operational Debian image, we are _mostly_ covered
|
||||||
|
# by the dirs we have copied. There's a _rare_ case where during debugging, we
|
||||||
|
# may want to install a system package that has components in `/etc` and
|
||||||
|
# `/var`, which will not be available in the **inner** container image. In that
|
||||||
|
# case, the developer can do the necessary symlinks in the live container.
|
||||||
|
#
|
||||||
|
# FILESYSTEM HIERARCHY
|
||||||
|
# ====================
|
||||||
|
#
|
||||||
|
# The above plan leads to the following filesystem hierarchy:
|
||||||
|
#
|
||||||
|
# Outer container image:
|
||||||
|
#
|
||||||
|
# # ls -l /
|
||||||
|
# lrwxrwxrwx 1 root root 7 Jan 27 10:46 bin -> usr/bin
|
||||||
|
# -rwxr-xr-x 1 root root 7764 Jan 24 08:14 entrypoint.py
|
||||||
|
# drwxr-xr-x 1 root root 4096 Jan 27 10:47 etc
|
||||||
|
# drwxr-xr-x 1 root root 4096 Jan 27 10:46 home
|
||||||
|
# lrwxrwxrwx 1 root root 7 Jan 27 10:46 lib -> usr/lib
|
||||||
|
# lrwxrwxrwx 1 root root 9 Jan 27 10:46 lib64 -> usr/lib64
|
||||||
|
# drwxr-xr-x 2 root root 4096 Jan 27 10:46 root
|
||||||
|
# drwxr-xr-x 1 root root 4096 Jan 27 10:47 run
|
||||||
|
# lrwxrwxrwx 1 root root 8 Jan 27 10:46 sbin -> usr/sbin
|
||||||
|
# drwxrwxrwx 2 root root 4096 Jan 27 10:46 tmp
|
||||||
|
# lrwxrwxrwx 1 root root 44 Jan 27 10:46 usr -> /home/dangerzone/dangerzone-image/rootfs/usr
|
||||||
|
# drwxr-xr-x 11 root root 4096 Jan 27 10:47 var
|
||||||
|
#
|
||||||
|
# Inner container image:
|
||||||
|
#
|
||||||
|
# # ls -l /home/dangerzone/dangerzone-image/rootfs/
|
||||||
|
# total 12
|
||||||
|
# lrwxrwxrwx 1 root root 7 Jan 27 10:47 bin -> usr/bin
|
||||||
|
# drwxr-xr-x 43 root root 4096 Jan 27 10:46 etc
|
||||||
|
# lrwxrwxrwx 1 root root 7 Jan 27 10:47 lib -> usr/lib
|
||||||
|
# lrwxrwxrwx 1 root root 9 Jan 27 10:47 lib64 -> usr/lib64
|
||||||
|
# drwxr-xr-x 4 root root 4096 Jan 27 10:47 opt
|
||||||
|
# drwxr-xr-x 12 root root 4096 Jan 27 10:47 usr
|
||||||
|
#
|
||||||
|
# SYMLINKING /USR
|
||||||
|
# ===============
|
||||||
|
#
|
||||||
|
# It's surprisingly difficult (maybe even borderline impossible), to symlink
|
||||||
|
# `/usr` to a different path during image build. The problem is that /usr
|
||||||
|
# is very sensitive, and you can't manipulate it in a live system. That is, I
|
||||||
|
# haven't found a way to do the following, or something equivalent:
|
||||||
|
#
|
||||||
|
# rm -r /usr && ln -s /home/dangerzone/dangerzone-image/rootfs/usr/ /usr
|
||||||
|
#
|
||||||
|
# The `ln` binary, even if you specify it by its full path, cannot run
|
||||||
|
# (probably because `ld-linux.so` can't be found). For this reason, we have
|
||||||
|
# to create the symlinks beforehand, in a previous build stage. Then, in an
|
||||||
|
# empty contianer image (scratch images), we can copy these symlinks and the
|
||||||
|
# /usr, and stich everything together.
|
||||||
|
###############################################################################
|
||||||
|
|
||||||
|
# Create the filesystem hierarchy that will be used to symlink /usr.
|
||||||
|
|
||||||
|
RUN mkdir /new_root
|
||||||
|
RUN mkdir /new_root/root /new_root/run /new_root/tmp
|
||||||
|
RUN chmod 777 /new_root/tmp
|
||||||
|
RUN ln -s /home/dangerzone/dangerzone-image/rootfs/usr /new_root/usr
|
||||||
|
RUN ln -s usr/bin /new_root/bin
|
||||||
|
RUN ln -s usr/lib /new_root/lib
|
||||||
|
RUN ln -s usr/lib64 /new_root/lib64
|
||||||
|
RUN ln -s usr/sbin /new_root/sbin
|
||||||
|
|
||||||
|
## Final image
|
||||||
|
|
||||||
|
FROM scratch
|
||||||
|
|
||||||
|
# Copy the filesystem hierarchy that we created in the previous stage, so that
|
||||||
|
# /usr can be a symlink.
|
||||||
|
COPY --from=dangerzone-image /new_root/ /
|
||||||
|
|
||||||
|
# Copy the bare minimum to run Dangerzone in the inner container image.
|
||||||
|
COPY --from=dangerzone-image /etc/ /home/dangerzone/dangerzone-image/rootfs/etc/
|
||||||
|
COPY --from=dangerzone-image /opt/ /home/dangerzone/dangerzone-image/rootfs/opt/
|
||||||
|
COPY --from=dangerzone-image /usr/ /home/dangerzone/dangerzone-image/rootfs/usr/
|
||||||
|
RUN ln -s usr/bin /home/dangerzone/dangerzone-image/rootfs/bin
|
||||||
|
RUN ln -s usr/lib /home/dangerzone/dangerzone-image/rootfs/lib
|
||||||
|
RUN ln -s usr/lib64 /home/dangerzone/dangerzone-image/rootfs/lib64
|
||||||
|
|
||||||
|
# Copy the bare minimum to let the security scanner find vulnerabilities.
|
||||||
|
COPY --from=dangerzone-image /etc/ /etc/
|
||||||
|
COPY --from=dangerzone-image /var/ /var/
|
||||||
|
|
||||||
|
# Allow our entrypoint script to make changes in the following folders.
|
||||||
|
RUN chown dangerzone:dangerzone /home/dangerzone /home/dangerzone/dangerzone-image/
|
||||||
|
|
||||||
|
# Switch to the dangerzone user for the rest of the script.
|
||||||
|
USER dangerzone
|
||||||
|
|
||||||
|
COPY container_helpers/entrypoint.py /
|
||||||
|
|
||||||
ENTRYPOINT ["/entrypoint.py"]
|
ENTRYPOINT ["/entrypoint.py"]
|
||||||
|
|
9
Dockerfile.env
Normal file
9
Dockerfile.env
Normal file
|
@ -0,0 +1,9 @@
|
||||||
|
# Can be bumped to the latest date in https://hub.docker.com/_/debian/tags?name=bookworm-
|
||||||
|
DEBIAN_IMAGE_DATE=20250113
|
||||||
|
# Can be bumped to today's date
|
||||||
|
DEBIAN_ARCHIVE_DATE=20250127
|
||||||
|
# Can be bumped to the latest date in https://github.com/google/gvisor/tags
|
||||||
|
GVISOR_ARCHIVE_DATE=20250120
|
||||||
|
# Can be bumped to the latest version and checksum from https://github.com/ebandal/H2Orestart/releases
|
||||||
|
H2ORESTART_CHECKSUM=7760dc2963332c50d15eee285933ec4b48d6a1de9e0c0f6082946f93090bd132
|
||||||
|
H2ORESTART_VERSION=v0.7.0
|
211
Dockerfile.in
Normal file
211
Dockerfile.in
Normal file
|
@ -0,0 +1,211 @@
|
||||||
|
# NOTE: Updating the packages to their latest versions requires bumping the
|
||||||
|
# Dockerfile args below. For more info about this file, read
|
||||||
|
# docs/developer/reproducibility.md.
|
||||||
|
|
||||||
|
ARG DEBIAN_IMAGE_DATE={{DEBIAN_IMAGE_DATE}}
|
||||||
|
|
||||||
|
FROM debian:bookworm-${DEBIAN_IMAGE_DATE}-slim as dangerzone-image
|
||||||
|
|
||||||
|
ARG GVISOR_ARCHIVE_DATE={{GVISOR_ARCHIVE_DATE}}
|
||||||
|
ARG DEBIAN_ARCHIVE_DATE={{DEBIAN_ARCHIVE_DATE}}
|
||||||
|
ARG H2ORESTART_CHECKSUM={{H2ORESTART_CHECKSUM}}
|
||||||
|
ARG H2ORESTART_VERSION={{H2ORESTART_VERSION}}
|
||||||
|
|
||||||
|
ENV DEBIAN_FRONTEND=noninteractive
|
||||||
|
|
||||||
|
# The following way of installing packages is taken from
|
||||||
|
# https://github.com/reproducible-containers/repro-sources-list.sh/blob/master/Dockerfile.debian-12,
|
||||||
|
# and adapted to allow installing gVisor from each own repo as well.
|
||||||
|
RUN \
|
||||||
|
--mount=type=cache,target=/var/cache/apt,sharing=locked \
|
||||||
|
--mount=type=cache,target=/var/lib/apt,sharing=locked \
|
||||||
|
--mount=type=bind,source=./container_helpers/repro-sources-list.sh,target=/usr/local/bin/repro-sources-list.sh \
|
||||||
|
--mount=type=bind,source=./container_helpers/gvisor.key,target=/tmp/gvisor.key \
|
||||||
|
: "Hacky way to set a date for the Debian snapshot repos" && \
|
||||||
|
touch -d ${DEBIAN_ARCHIVE_DATE} /etc/apt/sources.list.d/debian.sources && \
|
||||||
|
touch -d ${DEBIAN_ARCHIVE_DATE} /etc/apt/sources.list && \
|
||||||
|
repro-sources-list.sh && \
|
||||||
|
: "Setup APT to install gVisor from its separate APT repo" && \
|
||||||
|
apt-get update && \
|
||||||
|
apt-get upgrade -y && \
|
||||||
|
apt-get install -y --no-install-recommends apt-transport-https ca-certificates gnupg && \
|
||||||
|
gpg -o /usr/share/keyrings/gvisor-archive-keyring.gpg --dearmor /tmp/gvisor.key && \
|
||||||
|
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/gvisor-archive-keyring.gpg] https://storage.googleapis.com/gvisor/releases ${GVISOR_ARCHIVE_DATE} main" > /etc/apt/sources.list.d/gvisor.list && \
|
||||||
|
: "Install the necessary gVisor and Dangerzone dependencies" && \
|
||||||
|
apt-get update && \
|
||||||
|
apt-get install -y --no-install-recommends \
|
||||||
|
python3 python3-fitz libreoffice-nogui libreoffice-java-common \
|
||||||
|
python3 python3-magic default-jre-headless fonts-noto-cjk fonts-dejavu \
|
||||||
|
runsc unzip wget && \
|
||||||
|
: "Clean up for improving reproducibility (optional)" && \
|
||||||
|
rm -rf /var/cache/fontconfig/ && \
|
||||||
|
rm -rf /etc/ssl/certs/java/cacerts && \
|
||||||
|
rm -rf /var/log/* /var/cache/ldconfig/aux-cache
|
||||||
|
|
||||||
|
# Download H2ORestart from GitHub using a pinned version and hash. Note that
|
||||||
|
# it's available in Debian repos, but not in Bookworm yet.
|
||||||
|
RUN mkdir /opt/libreoffice_ext && cd /opt/libreoffice_ext \
|
||||||
|
&& H2ORESTART_FILENAME=h2orestart.oxt \
|
||||||
|
&& wget https://github.com/ebandal/H2Orestart/releases/download/$H2ORESTART_VERSION/$H2ORESTART_FILENAME \
|
||||||
|
&& echo "$H2ORESTART_CHECKSUM $H2ORESTART_FILENAME" | sha256sum -c \
|
||||||
|
&& install -dm777 "/usr/lib/libreoffice/share/extensions/" \
|
||||||
|
&& rm /root/.wget-hsts
|
||||||
|
|
||||||
|
# Create an unprivileged user both for gVisor and for running Dangerzone.
|
||||||
|
RUN addgroup --gid 1000 dangerzone
|
||||||
|
RUN adduser --uid 1000 --ingroup dangerzone --shell /bin/true \
|
||||||
|
--disabled-password --home /home/dangerzone dangerzone
|
||||||
|
|
||||||
|
# Copy Dangerzone's conversion logic under /opt/dangerzone, and allow Python to
|
||||||
|
# import it.
|
||||||
|
RUN mkdir -p /opt/dangerzone/dangerzone
|
||||||
|
RUN touch /opt/dangerzone/dangerzone/__init__.py
|
||||||
|
|
||||||
|
# Copy only the Python code, and not any produced .pyc files.
|
||||||
|
COPY conversion/*.py /opt/dangerzone/dangerzone/conversion/
|
||||||
|
|
||||||
|
# Create a directory that will be used by gVisor as the place where it will
|
||||||
|
# store the state of its containers.
|
||||||
|
RUN mkdir /home/dangerzone/.containers
|
||||||
|
|
||||||
|
###############################################################################
|
||||||
|
#
|
||||||
|
# REUSING CONTAINER IMAGES:
|
||||||
|
# Anatomy of a hack
|
||||||
|
# ========================
|
||||||
|
#
|
||||||
|
# The rest of the Dockerfile aims to do one thing: allow the final container
|
||||||
|
# image to actually contain two container images; one for the outer container
|
||||||
|
# (spawned by Podman/Docker Desktop), and one for the inner container (spawned
|
||||||
|
# by gVisor).
|
||||||
|
#
|
||||||
|
# This has already been done in the past, and we explain why and how in the
|
||||||
|
# design document for gVisor integration (should be in
|
||||||
|
# `docs/developer/gvisor.md`). In this iteration, we want to also
|
||||||
|
# achieve the following:
|
||||||
|
#
|
||||||
|
# 1. Have a small final image, by sharing some system paths between the inner
|
||||||
|
# and outer container image using symlinks.
|
||||||
|
# 2. Allow our security scanning tool to see the contents of the inner
|
||||||
|
# container image.
|
||||||
|
# 3. Make the outer container image operational, in the sense that you can use
|
||||||
|
# `apt` commands and perform a conversion with Dangerzone, outside the
|
||||||
|
# gVisor sandbox. This is helpful for debugging purposes.
|
||||||
|
#
|
||||||
|
# Below we'll explain how our design choices are informed by the above
|
||||||
|
# sub-goals.
|
||||||
|
#
|
||||||
|
# First, to achieve a small container image, we basically need to copy `/etc`,
|
||||||
|
# `/usr` and `/opt` from the original Dangerzone image to the **inner**
|
||||||
|
# container image (under `/home/dangerzone/dangerzone-image/rootfs/`)
|
||||||
|
#
|
||||||
|
# That's all we need. The rest of the files play no role, and we can actually
|
||||||
|
# mask them in gVisor's OCI config.
|
||||||
|
#
|
||||||
|
# Second, in order to let our security scanner find the installed packages,
|
||||||
|
# we need to copy the following dirs to the root of the **outer** container
|
||||||
|
# image:
|
||||||
|
# * `/etc`, so that the security scanner can detect the image type and its
|
||||||
|
# sources
|
||||||
|
# * `/var`, so that the security scanner can have access to the APT database.
|
||||||
|
#
|
||||||
|
# IMPORTANT: We don't symlink the `/etc` of the **outer** container image to
|
||||||
|
# the **inner** one, in order to avoid leaking files like
|
||||||
|
# `/etc/{hostname,hosts,resolv.conf}` that Podman/Docker mounts when running
|
||||||
|
# the **outer** container image.
|
||||||
|
#
|
||||||
|
# Third, in order to have an operational Debian image, we are _mostly_ covered
|
||||||
|
# by the dirs we have copied. There's a _rare_ case where during debugging, we
|
||||||
|
# may want to install a system package that has components in `/etc` and
|
||||||
|
# `/var`, which will not be available in the **inner** container image. In that
|
||||||
|
# case, the developer can do the necessary symlinks in the live container.
|
||||||
|
#
|
||||||
|
# FILESYSTEM HIERARCHY
|
||||||
|
# ====================
|
||||||
|
#
|
||||||
|
# The above plan leads to the following filesystem hierarchy:
|
||||||
|
#
|
||||||
|
# Outer container image:
|
||||||
|
#
|
||||||
|
# # ls -l /
|
||||||
|
# lrwxrwxrwx 1 root root 7 Jan 27 10:46 bin -> usr/bin
|
||||||
|
# -rwxr-xr-x 1 root root 7764 Jan 24 08:14 entrypoint.py
|
||||||
|
# drwxr-xr-x 1 root root 4096 Jan 27 10:47 etc
|
||||||
|
# drwxr-xr-x 1 root root 4096 Jan 27 10:46 home
|
||||||
|
# lrwxrwxrwx 1 root root 7 Jan 27 10:46 lib -> usr/lib
|
||||||
|
# lrwxrwxrwx 1 root root 9 Jan 27 10:46 lib64 -> usr/lib64
|
||||||
|
# drwxr-xr-x 2 root root 4096 Jan 27 10:46 root
|
||||||
|
# drwxr-xr-x 1 root root 4096 Jan 27 10:47 run
|
||||||
|
# lrwxrwxrwx 1 root root 8 Jan 27 10:46 sbin -> usr/sbin
|
||||||
|
# drwxrwxrwx 2 root root 4096 Jan 27 10:46 tmp
|
||||||
|
# lrwxrwxrwx 1 root root 44 Jan 27 10:46 usr -> /home/dangerzone/dangerzone-image/rootfs/usr
|
||||||
|
# drwxr-xr-x 11 root root 4096 Jan 27 10:47 var
|
||||||
|
#
|
||||||
|
# Inner container image:
|
||||||
|
#
|
||||||
|
# # ls -l /home/dangerzone/dangerzone-image/rootfs/
|
||||||
|
# total 12
|
||||||
|
# lrwxrwxrwx 1 root root 7 Jan 27 10:47 bin -> usr/bin
|
||||||
|
# drwxr-xr-x 43 root root 4096 Jan 27 10:46 etc
|
||||||
|
# lrwxrwxrwx 1 root root 7 Jan 27 10:47 lib -> usr/lib
|
||||||
|
# lrwxrwxrwx 1 root root 9 Jan 27 10:47 lib64 -> usr/lib64
|
||||||
|
# drwxr-xr-x 4 root root 4096 Jan 27 10:47 opt
|
||||||
|
# drwxr-xr-x 12 root root 4096 Jan 27 10:47 usr
|
||||||
|
#
|
||||||
|
# SYMLINKING /USR
|
||||||
|
# ===============
|
||||||
|
#
|
||||||
|
# It's surprisingly difficult (maybe even borderline impossible), to symlink
|
||||||
|
# `/usr` to a different path during image build. The problem is that /usr
|
||||||
|
# is very sensitive, and you can't manipulate it in a live system. That is, I
|
||||||
|
# haven't found a way to do the following, or something equivalent:
|
||||||
|
#
|
||||||
|
# rm -r /usr && ln -s /home/dangerzone/dangerzone-image/rootfs/usr/ /usr
|
||||||
|
#
|
||||||
|
# The `ln` binary, even if you specify it by its full path, cannot run
|
||||||
|
# (probably because `ld-linux.so` can't be found). For this reason, we have
|
||||||
|
# to create the symlinks beforehand, in a previous build stage. Then, in an
|
||||||
|
# empty contianer image (scratch images), we can copy these symlinks and the
|
||||||
|
# /usr, and stich everything together.
|
||||||
|
###############################################################################
|
||||||
|
|
||||||
|
# Create the filesystem hierarchy that will be used to symlink /usr.
|
||||||
|
|
||||||
|
RUN mkdir /new_root
|
||||||
|
RUN mkdir /new_root/root /new_root/run /new_root/tmp
|
||||||
|
RUN chmod 777 /new_root/tmp
|
||||||
|
RUN ln -s /home/dangerzone/dangerzone-image/rootfs/usr /new_root/usr
|
||||||
|
RUN ln -s usr/bin /new_root/bin
|
||||||
|
RUN ln -s usr/lib /new_root/lib
|
||||||
|
RUN ln -s usr/lib64 /new_root/lib64
|
||||||
|
RUN ln -s usr/sbin /new_root/sbin
|
||||||
|
|
||||||
|
## Final image
|
||||||
|
|
||||||
|
FROM scratch
|
||||||
|
|
||||||
|
# Copy the filesystem hierarchy that we created in the previous stage, so that
|
||||||
|
# /usr can be a symlink.
|
||||||
|
COPY --from=dangerzone-image /new_root/ /
|
||||||
|
|
||||||
|
# Copy the bare minimum to run Dangerzone in the inner container image.
|
||||||
|
COPY --from=dangerzone-image /etc/ /home/dangerzone/dangerzone-image/rootfs/etc/
|
||||||
|
COPY --from=dangerzone-image /opt/ /home/dangerzone/dangerzone-image/rootfs/opt/
|
||||||
|
COPY --from=dangerzone-image /usr/ /home/dangerzone/dangerzone-image/rootfs/usr/
|
||||||
|
RUN ln -s usr/bin /home/dangerzone/dangerzone-image/rootfs/bin
|
||||||
|
RUN ln -s usr/lib /home/dangerzone/dangerzone-image/rootfs/lib
|
||||||
|
RUN ln -s usr/lib64 /home/dangerzone/dangerzone-image/rootfs/lib64
|
||||||
|
|
||||||
|
# Copy the bare minimum to let the security scanner find vulnerabilities.
|
||||||
|
COPY --from=dangerzone-image /etc/ /etc/
|
||||||
|
COPY --from=dangerzone-image /var/ /var/
|
||||||
|
|
||||||
|
# Allow our entrypoint script to make changes in the following folders.
|
||||||
|
RUN chown dangerzone:dangerzone /home/dangerzone /home/dangerzone/dangerzone-image/
|
||||||
|
|
||||||
|
# Switch to the dangerzone user for the rest of the script.
|
||||||
|
USER dangerzone
|
||||||
|
|
||||||
|
COPY container_helpers/entrypoint.py /
|
||||||
|
|
||||||
|
ENTRYPOINT ["/entrypoint.py"]
|
3
Makefile
3
Makefile
|
@ -47,6 +47,9 @@ test-large: test-large-init ## Run large test set
|
||||||
python -m pytest --tb=no tests/test_large_set.py::TestLargeSet -v $(JUNIT_FLAGS) --junitxml=$(TEST_LARGE_RESULTS)
|
python -m pytest --tb=no tests/test_large_set.py::TestLargeSet -v $(JUNIT_FLAGS) --junitxml=$(TEST_LARGE_RESULTS)
|
||||||
python $(TEST_LARGE_RESULTS)/report.py $(TEST_LARGE_RESULTS)
|
python $(TEST_LARGE_RESULTS)/report.py $(TEST_LARGE_RESULTS)
|
||||||
|
|
||||||
|
Dockerfile: Dockerfile.env Dockerfile.in
|
||||||
|
poetry run jinja2 Dockerfile.in Dockerfile.env > Dockerfile
|
||||||
|
|
||||||
.PHONY: build-clean
|
.PHONY: build-clean
|
||||||
build-clean:
|
build-clean:
|
||||||
doit clean
|
doit clean
|
||||||
|
|
|
@ -15,6 +15,7 @@ Here is a list of tasks that should be done before issuing the release:
|
||||||
- [ ] Update the "Version" field in `install/linux/dangerzone.spec`
|
- [ ] Update the "Version" field in `install/linux/dangerzone.spec`
|
||||||
- [ ] Bump the Debian version by adding a new changelog entry in `debian/changelog`
|
- [ ] Bump the Debian version by adding a new changelog entry in `debian/changelog`
|
||||||
- [ ] [Bump the minimum Docker Desktop versions](https://github.com/freedomofpress/dangerzone/blob/main/RELEASE.md#bump-the-minimum-docker-desktop-version) in `isolation_provider/container.py`
|
- [ ] [Bump the minimum Docker Desktop versions](https://github.com/freedomofpress/dangerzone/blob/main/RELEASE.md#bump-the-minimum-docker-desktop-version) in `isolation_provider/container.py`
|
||||||
|
- [ ] Bump the dates and versions in the `Dockerfile`
|
||||||
- [ ] Update screenshot in `README.md`, if necessary
|
- [ ] Update screenshot in `README.md`, if necessary
|
||||||
- [ ] CHANGELOG.md should be updated to include a list of all major changes since the last release
|
- [ ] CHANGELOG.md should be updated to include a list of all major changes since the last release
|
||||||
- [ ] A draft release should be created. Copy the release notes text from the template at [`docs/templates/release-notes`](https://github.com/freedomofpress/dangerzone/tree/main/docs/templates/)
|
- [ ] A draft release should be created. Copy the release notes text from the template at [`docs/templates/release-notes`](https://github.com/freedomofpress/dangerzone/tree/main/docs/templates/)
|
||||||
|
|
14
THIRD_PARTY_NOTICE
Normal file
14
THIRD_PARTY_NOTICE
Normal file
|
@ -0,0 +1,14 @@
|
||||||
|
This project includes third-party components as follows:
|
||||||
|
|
||||||
|
1. gVisor APT Key
|
||||||
|
- URL: https://gvisor.dev/archive.key
|
||||||
|
- Last updated: 2025-01-21
|
||||||
|
- Description: This is the public key used for verifying packages from the gVisor repository.
|
||||||
|
|
||||||
|
2. Reproducible Containers Helper Script
|
||||||
|
- URL: https://github.com/reproducible-containers/repro-sources-list.sh/blob/d15cf12b26395b857b24fba223b108aff1c91b26/repro-sources-list.sh
|
||||||
|
- Last updated: 2025-01-21
|
||||||
|
- Description: This script is used for building reproducible Debian images.
|
||||||
|
|
||||||
|
Please refer to the respective sources for licensing information and further details regarding the use of these components.
|
||||||
|
|
|
@ -59,10 +59,28 @@ oci_config: dict[str, typing.Any] = {
|
||||||
"root": {"path": "rootfs", "readonly": True},
|
"root": {"path": "rootfs", "readonly": True},
|
||||||
"hostname": "dangerzone",
|
"hostname": "dangerzone",
|
||||||
"mounts": [
|
"mounts": [
|
||||||
|
# Mask almost every system directory of the outer container, by mounting tmpfs
|
||||||
|
# on top of them. This is done to avoid leaking any sensitive information,
|
||||||
|
# either mounted by Podman/Docker, or when gVisor runs, since we reuse the same
|
||||||
|
# rootfs. We basically mask everything except for `/usr`, `/bin`, `/lib`,
|
||||||
|
# `/etc`, and `/opt`.
|
||||||
|
#
|
||||||
|
# Note that we set `--root /home/dangerzone/.containers` for the directory where
|
||||||
|
# gVisor will create files at runtime, which means that in principle, we are
|
||||||
|
# covered by the masking of `/home/dangerzone` that follows below.
|
||||||
|
#
|
||||||
|
# Finally, note that the following list has been taken from the dirs in our
|
||||||
|
# container image, and double-checked against the top-level dirs listed in the
|
||||||
|
# Filesystem Hierarchy Standard (FHS) [1]. It would be nice to have an allowlist
|
||||||
|
# approach instead of a denylist, but FHS is such an old standard that we don't
|
||||||
|
# expect any new top-level dirs to pop up any time soon.
|
||||||
|
#
|
||||||
|
# [1] https://en.wikipedia.org/wiki/Filesystem_Hierarchy_Standard
|
||||||
{
|
{
|
||||||
"destination": "/proc",
|
"destination": "/boot",
|
||||||
"type": "proc",
|
"type": "tmpfs",
|
||||||
"source": "proc",
|
"source": "tmpfs",
|
||||||
|
"options": ["nosuid", "noexec", "nodev", "ro"],
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"destination": "/dev",
|
"destination": "/dev",
|
||||||
|
@ -70,6 +88,53 @@ oci_config: dict[str, typing.Any] = {
|
||||||
"source": "tmpfs",
|
"source": "tmpfs",
|
||||||
"options": ["nosuid", "noexec", "nodev"],
|
"options": ["nosuid", "noexec", "nodev"],
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"destination": "/home",
|
||||||
|
"type": "tmpfs",
|
||||||
|
"source": "tmpfs",
|
||||||
|
"options": ["nosuid", "noexec", "nodev", "ro"],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"destination": "/media",
|
||||||
|
"type": "tmpfs",
|
||||||
|
"source": "tmpfs",
|
||||||
|
"options": ["nosuid", "noexec", "nodev", "ro"],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"destination": "/mnt",
|
||||||
|
"type": "tmpfs",
|
||||||
|
"source": "tmpfs",
|
||||||
|
"options": ["nosuid", "noexec", "nodev", "ro"],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"destination": "/proc",
|
||||||
|
"type": "proc",
|
||||||
|
"source": "proc",
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"destination": "/root",
|
||||||
|
"type": "tmpfs",
|
||||||
|
"source": "tmpfs",
|
||||||
|
"options": ["nosuid", "noexec", "nodev", "ro"],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"destination": "/run",
|
||||||
|
"type": "tmpfs",
|
||||||
|
"source": "tmpfs",
|
||||||
|
"options": ["nosuid", "noexec", "nodev"],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"destination": "/sbin",
|
||||||
|
"type": "tmpfs",
|
||||||
|
"source": "tmpfs",
|
||||||
|
"options": ["nosuid", "noexec", "nodev", "ro"],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"destination": "/srv",
|
||||||
|
"type": "tmpfs",
|
||||||
|
"source": "tmpfs",
|
||||||
|
"options": ["nosuid", "noexec", "nodev", "ro"],
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"destination": "/sys",
|
"destination": "/sys",
|
||||||
"type": "tmpfs",
|
"type": "tmpfs",
|
||||||
|
@ -82,6 +147,12 @@ oci_config: dict[str, typing.Any] = {
|
||||||
"source": "tmpfs",
|
"source": "tmpfs",
|
||||||
"options": ["nosuid", "noexec", "nodev"],
|
"options": ["nosuid", "noexec", "nodev"],
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"destination": "/var",
|
||||||
|
"type": "tmpfs",
|
||||||
|
"source": "tmpfs",
|
||||||
|
"options": ["nosuid", "noexec", "nodev"],
|
||||||
|
},
|
||||||
# LibreOffice needs a writable home directory, so just mount a tmpfs
|
# LibreOffice needs a writable home directory, so just mount a tmpfs
|
||||||
# over it.
|
# over it.
|
||||||
{
|
{
|
29
dangerzone/container_helpers/gvisor.key
Normal file
29
dangerzone/container_helpers/gvisor.key
Normal file
|
@ -0,0 +1,29 @@
|
||||||
|
-----BEGIN PGP PUBLIC KEY BLOCK-----
|
||||||
|
|
||||||
|
mQINBF0meAYBEACcBYPOSBiKtid+qTQlbgKGPxUYt0cNZiQqWXylhYUT4PuNlNx5
|
||||||
|
s+sBLFvNTpdTrXMmZ8NkekyjD1HardWvebvJT4u+Ho/9jUr4rP71cNwNtocz/w8G
|
||||||
|
DsUXSLgH8SDkq6xw0L+5eGc78BBg9cOeBeFBm3UPgxTBXS9Zevoi2w1lzSxkXvjx
|
||||||
|
cGzltzMZfPXERljgLzp9AAfhg/2ouqVQm37fY+P/NDzFMJ1XHPIIp9KJl/prBVud
|
||||||
|
jJJteFZ5sgL6MwjBQq2kw+q2Jb8Zfjl0BeXDgGMN5M5lGhX2wTfiMbfo7KWyzRnB
|
||||||
|
RpSP3BxlLqYeQUuLG5Yx8z3oA3uBkuKaFOKvXtiScxmGM/+Ri2YM3m66imwDhtmP
|
||||||
|
AKwTPI3Re4gWWOffglMVSv2sUAY32XZ74yXjY1VhK3bN3WFUPGrgQx4X7GP0A1Te
|
||||||
|
lzqkT3VSMXieImTASosK5L5Q8rryvgCeI9tQLn9EpYFCtU3LXvVgTreGNEEjMOnL
|
||||||
|
dR7yOU+Fs775stn6ucqmdYarx7CvKUrNAhgEeHMonLe1cjYScF7NfLO1GIrQKJR2
|
||||||
|
DE0f+uJZ52inOkO8ufh3WVQJSYszuS3HCY7w5oj1aP38k/y9zZdZvVvwAWZaiqBQ
|
||||||
|
iwjVs6Kub76VVZZhRDf4iYs8k1Zh64nXdfQt250d8U5yMPF3wIJ+c1yhxwARAQAB
|
||||||
|
tCpUaGUgZ1Zpc29yIEF1dGhvcnMgPGd2aXNvci1ib3RAZ29vZ2xlLmNvbT6JAk4E
|
||||||
|
EwEKADgCGwMFCwkIBwIGFQoJCAsCBBYCAwECHgECF4AWIQRvHfheOnHCSRjnJ9Vv
|
||||||
|
xtVU4yvZQwUCYO4TxQAKCRBvxtVU4yvZQ9UoEACLPV7CnEA2bjCPi0NCWB/Mo1WL
|
||||||
|
evqv7Wv7vmXzI1K9DrqOhxuamQW75SVXg1df0hTJWbKFmDAip6NEC2Rg5P+A8hHj
|
||||||
|
nW/VG+q4ZFT662jDhnXQiO9L7EZzjyqNF4yWYzzgnqEu/SmGkDLDYiUCcGBqS2oE
|
||||||
|
EQfk7RHJSLMJXAnNDH7OUDgrirSssg/dlQ5uAHA9Au80VvC5fsTKza8b3Aydw3SV
|
||||||
|
iB8/Yuikbl8wKbpSGiXtR4viElXjNips0+mBqaUk2xpqSBrsfN+FezcInVXaXFeq
|
||||||
|
xtpq2/3M3DYbqCRjqeyd9wNi92FHdOusNrK4MYe0pAYbGjc65BwH+F0T4oJ8ZSJV
|
||||||
|
lIt+FZ0MqM1T97XadybYFsJh8qvajQpZEPL+zzNncc4f1d80e7+lwIZV/al0FZWW
|
||||||
|
Zlp7TpbeO/uW+lHs5W14YKwaQVh1whapKXTrATipNOOSCw2hnfrT8V7Hy55QWaGZ
|
||||||
|
f4/kfy929EeCP16d/LqOClv0j0RBr6NhRBQ0l/BE/mXjJwIk6nKwi+Yi4ek1ARi6
|
||||||
|
AlCMLn9AZF7aTGpvCiftzIrlyDfVZT5IX03TayxRHZ4b1Rj8eyJaHcjI49u83gkr
|
||||||
|
4LGX08lEawn9nxFSx4RCg2swGiYw5F436wwwAIozqJuDASeTa3QND3au5v0oYWnl
|
||||||
|
umDySUl5wPaAaALgzA==
|
||||||
|
=5/8T
|
||||||
|
-----END PGP PUBLIC KEY BLOCK-----
|
103
dangerzone/container_helpers/repro-sources-list.sh
Executable file
103
dangerzone/container_helpers/repro-sources-list.sh
Executable file
|
@ -0,0 +1,103 @@
|
||||||
|
#!/bin/bash
|
||||||
|
#
|
||||||
|
# Copyright The repro-sources-list.sh Authors.
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
# -----------------------------------------------------------------------------
|
||||||
|
# repro-sources-list.sh:
|
||||||
|
# configures /etc/apt/sources.list and similar files for installing packages from a snapshot.
|
||||||
|
#
|
||||||
|
# This script is expected to be executed inside Dockerfile.
|
||||||
|
#
|
||||||
|
# The following distributions are supported:
|
||||||
|
# - debian:11 (/etc/apt/sources.list)
|
||||||
|
# - debian:12 (/etc/apt/sources.list.d/debian.sources)
|
||||||
|
# - ubuntu:22.04 (/etc/apt/sources.list)
|
||||||
|
# - ubuntu:24.04 (/etc/apt/sources.listd/ubuntu.sources)
|
||||||
|
# - archlinux (/etc/pacman.d/mirrorlist)
|
||||||
|
#
|
||||||
|
# For the further information, see https://github.com/reproducible-containers/repro-sources-list.sh
|
||||||
|
# -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
set -eux -o pipefail
|
||||||
|
|
||||||
|
. /etc/os-release
|
||||||
|
|
||||||
|
: "${KEEP_CACHE:=1}"
|
||||||
|
|
||||||
|
keep_apt_cache() {
|
||||||
|
rm -f /etc/apt/apt.conf.d/docker-clean
|
||||||
|
echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' >/etc/apt/apt.conf.d/keep-cache
|
||||||
|
}
|
||||||
|
|
||||||
|
case "${ID}" in
|
||||||
|
"debian")
|
||||||
|
: "${SNAPSHOT_ARCHIVE_BASE:=http://snapshot.debian.org/archive/}"
|
||||||
|
: "${BACKPORTS:=}"
|
||||||
|
if [ -e /etc/apt/sources.list.d/debian.sources ]; then
|
||||||
|
: "${SOURCE_DATE_EPOCH:=$(stat --format=%Y /etc/apt/sources.list.d/debian.sources)}"
|
||||||
|
rm -f /etc/apt/sources.list.d/debian.sources
|
||||||
|
else
|
||||||
|
: "${SOURCE_DATE_EPOCH:=$(stat --format=%Y /etc/apt/sources.list)}"
|
||||||
|
fi
|
||||||
|
snapshot="$(printf "%(%Y%m%dT%H%M%SZ)T\n" "${SOURCE_DATE_EPOCH}")"
|
||||||
|
# TODO: use the new format for Debian >= 12
|
||||||
|
echo "deb [check-valid-until=no] ${SNAPSHOT_ARCHIVE_BASE}debian/${snapshot} ${VERSION_CODENAME} main" >/etc/apt/sources.list
|
||||||
|
echo "deb [check-valid-until=no] ${SNAPSHOT_ARCHIVE_BASE}debian-security/${snapshot} ${VERSION_CODENAME}-security main" >>/etc/apt/sources.list
|
||||||
|
echo "deb [check-valid-until=no] ${SNAPSHOT_ARCHIVE_BASE}debian/${snapshot} ${VERSION_CODENAME}-updates main" >>/etc/apt/sources.list
|
||||||
|
if [ "${BACKPORTS}" = 1 ]; then echo "deb [check-valid-until=no] ${SNAPSHOT_ARCHIVE_BASE}debian/${snapshot} ${VERSION_CODENAME}-backports main" >>/etc/apt/sources.list; fi
|
||||||
|
if [ "${KEEP_CACHE}" = 1 ]; then keep_apt_cache; fi
|
||||||
|
;;
|
||||||
|
"ubuntu")
|
||||||
|
: "${SNAPSHOT_ARCHIVE_BASE:=http://snapshot.ubuntu.com/}"
|
||||||
|
if [ -e /etc/apt/sources.list.d/ubuntu.sources ]; then
|
||||||
|
: "${SOURCE_DATE_EPOCH:=$(stat --format=%Y /etc/apt/sources.list.d/ubuntu.sources)}"
|
||||||
|
rm -f /etc/apt/sources.list.d/ubuntu.sources
|
||||||
|
else
|
||||||
|
: "${SOURCE_DATE_EPOCH:=$(stat --format=%Y /etc/apt/sources.list)}"
|
||||||
|
fi
|
||||||
|
snapshot="$(printf "%(%Y%m%dT%H%M%SZ)T\n" "${SOURCE_DATE_EPOCH}")"
|
||||||
|
# TODO: use the new format for Ubuntu >= 24.04
|
||||||
|
echo "deb [check-valid-until=no] ${SNAPSHOT_ARCHIVE_BASE}ubuntu/${snapshot} ${VERSION_CODENAME} main restricted" >/etc/apt/sources.list
|
||||||
|
echo "deb [check-valid-until=no] ${SNAPSHOT_ARCHIVE_BASE}ubuntu/${snapshot} ${VERSION_CODENAME}-updates main restricted" >>/etc/apt/sources.list
|
||||||
|
echo "deb [check-valid-until=no] ${SNAPSHOT_ARCHIVE_BASE}ubuntu/${snapshot} ${VERSION_CODENAME} universe" >>/etc/apt/sources.list
|
||||||
|
echo "deb [check-valid-until=no] ${SNAPSHOT_ARCHIVE_BASE}ubuntu/${snapshot} ${VERSION_CODENAME}-updates universe" >>/etc/apt/sources.list
|
||||||
|
echo "deb [check-valid-until=no] ${SNAPSHOT_ARCHIVE_BASE}ubuntu/${snapshot} ${VERSION_CODENAME} multiverse" >>/etc/apt/sources.list
|
||||||
|
echo "deb [check-valid-until=no] ${SNAPSHOT_ARCHIVE_BASE}ubuntu/${snapshot} ${VERSION_CODENAME}-updates multiverse" >>/etc/apt/sources.list
|
||||||
|
echo "deb [check-valid-until=no] ${SNAPSHOT_ARCHIVE_BASE}ubuntu/${snapshot} ${VERSION_CODENAME}-backports main restricted universe multiverse" >>/etc/apt/sources.list
|
||||||
|
echo "deb [check-valid-until=no] ${SNAPSHOT_ARCHIVE_BASE}ubuntu/${snapshot} ${VERSION_CODENAME}-security main restricted" >>/etc/apt/sources.list
|
||||||
|
echo "deb [check-valid-until=no] ${SNAPSHOT_ARCHIVE_BASE}ubuntu/${snapshot} ${VERSION_CODENAME}-security universe" >>/etc/apt/sources.list
|
||||||
|
echo "deb [check-valid-until=no] ${SNAPSHOT_ARCHIVE_BASE}ubuntu/${snapshot} ${VERSION_CODENAME}-security multiverse" >>/etc/apt/sources.list
|
||||||
|
if [ "${KEEP_CACHE}" = 1 ]; then keep_apt_cache; fi
|
||||||
|
# http://snapshot.ubuntu.com is redirected to https, so we have to install ca-certificates
|
||||||
|
export DEBIAN_FRONTEND=noninteractive
|
||||||
|
apt-get -o Acquire::https::Verify-Peer=false update >&2
|
||||||
|
apt-get -o Acquire::https::Verify-Peer=false install -y ca-certificates >&2
|
||||||
|
;;
|
||||||
|
"arch")
|
||||||
|
: "${SNAPSHOT_ARCHIVE_BASE:=http://archive.archlinux.org/}"
|
||||||
|
: "${SOURCE_DATE_EPOCH:=$(stat --format=%Y /var/log/pacman.log)}"
|
||||||
|
export SOURCE_DATE_EPOCH
|
||||||
|
# shellcheck disable=SC2016
|
||||||
|
date -d "@${SOURCE_DATE_EPOCH}" "+Server = ${SNAPSHOT_ARCHIVE_BASE}repos/%Y/%m/%d/\$repo/os/\$arch" >/etc/pacman.d/mirrorlist
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
echo >&2 "Unsupported distribution: ${ID}"
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
: "${WRITE_SOURCE_DATE_EPOCH:=/dev/null}"
|
||||||
|
echo "${SOURCE_DATE_EPOCH}" >"${WRITE_SOURCE_DATE_EPOCH}"
|
||||||
|
echo "SOURCE_DATE_EPOCH=${SOURCE_DATE_EPOCH}"
|
|
@ -129,6 +129,10 @@ class DocumentToPixels(DangerzoneConverter):
|
||||||
# At least .odt, .docx, .odg, .odp, .ods, and .pptx
|
# At least .odt, .docx, .odg, .odp, .ods, and .pptx
|
||||||
"application/zip": {
|
"application/zip": {
|
||||||
"type": "libreoffice",
|
"type": "libreoffice",
|
||||||
|
# NOTE: `file` command < 5.45 cannot detect hwpx files properly, so we
|
||||||
|
# enable the extension in any case. See also:
|
||||||
|
# https://github.com/freedomofpress/dangerzone/pull/460#issuecomment-1654166465
|
||||||
|
"libreoffice_ext": "h2orestart.oxt",
|
||||||
},
|
},
|
||||||
# At least .doc, .docx, .odg, .odp, .odt, .pdf, .ppt, .pptx, .xls, and .xlsx
|
# At least .doc, .docx, .odg, .odp, .odt, .pdf, .ppt, .pptx, .xls, and .xlsx
|
||||||
"application/octet-stream": {
|
"application/octet-stream": {
|
||||||
|
@ -249,7 +253,7 @@ class DocumentToPixels(DangerzoneConverter):
|
||||||
"unzip",
|
"unzip",
|
||||||
"-d",
|
"-d",
|
||||||
f"/usr/lib/libreoffice/share/extensions/{libreoffice_ext}/",
|
f"/usr/lib/libreoffice/share/extensions/{libreoffice_ext}/",
|
||||||
f"/libreoffice_ext/{libreoffice_ext}",
|
f"/opt/libreoffice_ext/{libreoffice_ext}",
|
||||||
]
|
]
|
||||||
await self.run_command(
|
await self.run_command(
|
||||||
unzip_args,
|
unzip_args,
|
||||||
|
|
2
debian/rules
vendored
2
debian/rules
vendored
|
@ -9,5 +9,5 @@ export DH_VERBOSE=1
|
||||||
dh $@ --with python3 --buildsystem=pybuild
|
dh $@ --with python3 --buildsystem=pybuild
|
||||||
|
|
||||||
override_dh_builddeb:
|
override_dh_builddeb:
|
||||||
./install/linux/vendor-pymupdf.py --dest debian/dangerzone/usr/lib/python3/dist-packages/dangerzone/vendor/
|
./install/linux/debian-vendor-pymupdf.py --dest debian/dangerzone/usr/lib/python3/dist-packages/dangerzone/vendor/
|
||||||
dh_builddeb $@
|
dh_builddeb $@
|
||||||
|
|
180
dev_scripts/reproduce-image.py
Executable file
180
dev_scripts/reproduce-image.py
Executable file
|
@ -0,0 +1,180 @@
|
||||||
|
#!/usr/bin/env python3
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import hashlib
|
||||||
|
import logging
|
||||||
|
import pathlib
|
||||||
|
import stat
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
import urllib.request
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
DIFFOCI_URL = "https://github.com/reproducible-containers/diffoci/releases/download/v0.1.5/diffoci-v0.1.5.linux-amd64"
|
||||||
|
DIFFOCI_CHECKSUM = "01d25fe690196945a6bd510d30559338aa489c034d3a1b895a0d82a4b860698f"
|
||||||
|
DIFFOCI_PATH = (
|
||||||
|
pathlib.Path.home() / ".local" / "share" / "dangerzone-dev" / "helpers" / "diffoci"
|
||||||
|
)
|
||||||
|
IMAGE_NAME = "dangerzone.rocks/dangerzone"
|
||||||
|
|
||||||
|
|
||||||
|
def run(*args):
|
||||||
|
"""Simple function that runs a command, validates it, and returns the output"""
|
||||||
|
logger.debug(f"Running command: {' '.join(args)}")
|
||||||
|
return subprocess.run(
|
||||||
|
args,
|
||||||
|
check=True,
|
||||||
|
stdout=subprocess.PIPE,
|
||||||
|
).stdout
|
||||||
|
|
||||||
|
|
||||||
|
def git_commit_get():
|
||||||
|
return run("git", "rev-parse", "--short", "HEAD").decode().strip()
|
||||||
|
|
||||||
|
|
||||||
|
def git_determine_tag():
|
||||||
|
return run("git", "describe", "--long", "--first-parent").decode().strip()[1:]
|
||||||
|
|
||||||
|
|
||||||
|
def git_verify(commit, source):
|
||||||
|
if not commit in source:
|
||||||
|
raise RuntimeError(
|
||||||
|
f"Image '{source}' does not seem to be built from commit '{commit}'"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def diffoci_hash_matches(diffoci):
|
||||||
|
"""Check if the hash of the downloaded diffoci bin matches the expected one."""
|
||||||
|
m = hashlib.sha256()
|
||||||
|
m.update(diffoci)
|
||||||
|
diffoci_checksum = m.hexdigest()
|
||||||
|
return diffoci_checksum == DIFFOCI_CHECKSUM
|
||||||
|
|
||||||
|
|
||||||
|
def diffoci_is_installed():
    """Determine if diffoci has been installed.

    Determine if diffoci has been installed, by checking if the binary exists, and if
    its hash is the expected one. If the binary exists but the hash is different, then
    this is a sign that we need to update the local diffoci binary.
    """
    if not DIFFOCI_PATH.exists():
        return False
    # read_bytes() opens, reads, and closes the file; the original
    # `DIFFOCI_PATH.open("rb").read()` left the file handle unclosed.
    return diffoci_hash_matches(DIFFOCI_PATH.read_bytes())
|
||||||
|
|
||||||
|
|
||||||
|
def diffoci_download():
    """Download the diffoci tool, based on a URL and its checksum.

    Raises:
        ValueError: If the downloaded binary's SHA-256 digest does not match the
            pinned DIFFOCI_CHECKSUM.
    """
    with urllib.request.urlopen(DIFFOCI_URL) as f:
        diffoci_bin = f.read()

    if not diffoci_hash_matches(diffoci_bin):
        # Compute the actual digest locally for the error message. The original
        # code referenced an undefined name (`diffoci_checksum`) here, which
        # would have raised a NameError instead of the intended ValueError.
        actual_checksum = hashlib.sha256(diffoci_bin).hexdigest()
        raise ValueError(
            "Unexpected checksum for downloaded diffoci binary:"
            f" {actual_checksum} != {DIFFOCI_CHECKSUM}"
        )

    DIFFOCI_PATH.parent.mkdir(parents=True, exist_ok=True)
    # Use a context manager so the handle is closed before we chmod the file.
    with DIFFOCI_PATH.open("wb+") as out:
        out.write(diffoci_bin)
    # Mark the downloaded helper as executable for the owner.
    DIFFOCI_PATH.chmod(DIFFOCI_PATH.stat().st_mode | stat.S_IEXEC)
|
||||||
|
|
||||||
|
|
||||||
|
def diffoci_diff(source, local_target):
    """Diff the source image against the recently built target image using diffoci."""
    target = f"podman://{local_target}"
    cmd = [
        str(DIFFOCI_PATH),
        "diff",
        source,
        target,
        "--semantic",
        "--verbose",
    ]
    try:
        return run(*cmd)
    except subprocess.CalledProcessError as e:
        report = e.stdout.decode()
        raise RuntimeError(
            f"Could not rebuild an identical image to {source}. Diffoci report:\n{report}"
        )
|
||||||
|
|
||||||
|
|
||||||
|
def build_image(tag, use_cache=False):
    """Build the Dangerzone container image with a special tag."""
    cmd = [
        "python3",
        "./install/common/build-image.py",
        "--no-save",
        "--use-cache",
        str(use_cache),
        "--tag",
        tag,
    ]
    run(*cmd)
|
||||||
|
|
||||||
|
|
||||||
|
def parse_args():
    """Build and parse the command-line arguments for this script."""
    image_tag = git_determine_tag()
    # TODO: Remove the local "podman://" prefix once we have started pushing images to a
    # remote.
    default_image_name = f"podman://{IMAGE_NAME}:{image_tag}"

    parser = argparse.ArgumentParser(
        prog=sys.argv[0],
        description="Dev script for verifying container image reproducibility",
    )
    source_help = (
        "The name of the image that you want to reproduce. If the image resides in"
        " the local Docker / Podman engine, you can prefix it with podman:// or"
        f" docker:// accordingly (default: {default_image_name})"
    )
    parser.add_argument("--source", default=default_image_name, help=source_help)
    parser.add_argument(
        "--use-cache",
        default=False,
        action="store_true",
        help="Whether to reuse the build cache (off by default for better reproducibility)",
    )
    return parser.parse_args()
|
||||||
|
|
||||||
|
|
||||||
|
def main():
    """Verify that the Dangerzone container image can be reproduced from HEAD.

    Checks that the current Git commit matches the source image, downloads the
    diffoci helper if needed, rebuilds the image, and semantically diffs the two.
    """
    logging.basicConfig(
        level=logging.DEBUG,
        format="%(asctime)s - %(levelname)s - %(message)s",
        datefmt="%Y-%m-%d %H:%M:%S",
    )
    args = parse_args()

    logger.info(f"Ensuring that current Git commit matches image '{args.source}'")
    commit = git_commit_get()
    git_verify(commit, args.source)

    if not diffoci_is_installed():
        logger.info(f"Downloading diffoci helper from {DIFFOCI_URL}")
        diffoci_download()

    tag = f"reproduce-{commit}"
    target = f"{IMAGE_NAME}:{tag}"
    logger.info(f"Building container image and tagging it as '{target}'")
    build_image(tag, args.use_cache)

    logger.info(
        f"Ensuring that source image '{args.source}' is semantically identical with"
        f" built image '{target}'"
    )
    # diffoci_diff() already converts a CalledProcessError into a RuntimeError
    # that includes the diffoci report, so the previous try/except here was dead
    # code. The stray `breakpoint()` debugger call left behind in the original
    # has also been removed.
    diffoci_diff(args.source, target)

    logger.info(f"Successfully reproduced image '{args.source}' from commit '{commit}'")
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
sys.exit(main())
|
|
@ -44,20 +44,6 @@ doit <task>
|
||||||
* You can run `doit list --all -s` to see the full list of tasks, their
|
* You can run `doit list --all -s` to see the full list of tasks, their
|
||||||
dependencies, and whether they are up to date.
|
dependencies, and whether they are up to date.
|
||||||
* You can run `doit info <task>` to see which dependencies are missing.
|
* You can run `doit info <task>` to see which dependencies are missing.
|
||||||
* You can change this line in `pyproject.toml` to `true`, to allow using the
|
|
||||||
Docker/Podman build cache:
|
|
||||||
|
|
||||||
```
|
|
||||||
use_cache = true
|
|
||||||
```
|
|
||||||
|
|
||||||
> [!WARNING]
|
|
||||||
> Using caching may speed up image builds, but is not suitable for release
|
|
||||||
> artifacts. The ID of our base container image (Alpine Linux) does not change
|
|
||||||
> that often, but its APK package index does. So, if we use caching, we risk
|
|
||||||
> skipping the `apk upgrade` layer and end up with packages that are days
|
|
||||||
> behind.
|
|
||||||
|
|
||||||
* You can pass the following environment variables to the script, in order to
|
* You can pass the following environment variables to the script, in order to
|
||||||
affect some global parameters:
|
affect some global parameters:
|
||||||
- `CONTAINER_RUNTIME`: The container runtime to use. Either `podman` (default)
|
- `CONTAINER_RUNTIME`: The container runtime to use. Either `podman` (default)
|
||||||
|
|
|
@ -1,5 +1,11 @@
|
||||||
# gVisor integration
|
# gVisor integration
|
||||||
|
|
||||||
|
> [!NOTE]
|
||||||
|
> **Update on 2025-01-13:** There is no longer a copied container image under
|
||||||
|
> `/home/dangerzone/dangerzone-image/rootfs`. We now reuse the same container
|
||||||
|
> image both for the inner and outer container. See
|
||||||
|
> [#1048](https://github.com/freedomofpress/dangerzone/issues/1048).
|
||||||
|
|
||||||
Dangerzone has relied on the container runtime available in each supported
|
Dangerzone has relied on the container runtime available in each supported
|
||||||
operating system (Docker Desktop on Windows / macOS, Podman on Linux) to isolate
|
operating system (Docker Desktop on Windows / macOS, Podman on Linux) to isolate
|
||||||
the host from the sanitization process. The problem with this type of isolation
|
the host from the sanitization process. The problem with this type of isolation
|
||||||
|
|
67
docs/developer/reproducibility.md
Normal file
67
docs/developer/reproducibility.md
Normal file
|
@ -0,0 +1,67 @@
|
||||||
|
# Reproducible builds
|
||||||
|
|
||||||
|
We want to improve the transparency and auditability of our build artifacts, and
|
||||||
|
a way to achieve this is via reproducible builds. For a broader understanding of
|
||||||
|
what reproducible builds entail, check out https://reproducible-builds.org/.
|
||||||
|
|
||||||
|
Our build artifacts consist of:
|
||||||
|
* Container images (`amd64` and `arm64` architectures)
|
||||||
|
* macOS installers (for Intel and Apple Silicon CPUs)
|
||||||
|
* Windows installer
|
||||||
|
* Fedora packages (for regular Fedora distros and Qubes)
|
||||||
|
* Debian packages (for Debian and Ubuntu)
|
||||||
|
|
||||||
|
As of writing this, only the following artifacts are reproducible:
|
||||||
|
* Container images (see [#1047](https://github.com/freedomofpress/dangerzone/issues/1047))
|
||||||
|
|
||||||
|
In the following sections, we'll mention some specifics about enforcing
|
||||||
|
reproducibility for each artifact type.
|
||||||
|
|
||||||
|
## Container image
|
||||||
|
|
||||||
|
### Updating the image
|
||||||
|
|
||||||
|
The fact that our image is reproducible also means that it's frozen in time.
|
||||||
|
This means that rebuilding the image without updating our Dockerfile will
|
||||||
|
**not** receive security updates.
|
||||||
|
|
||||||
|
Here are the necessary variables that make up our image in the `Dockerfile.env`
|
||||||
|
file:
|
||||||
|
* `DEBIAN_IMAGE_DATE`: The date that the Debian container image was released
|
||||||
|
* `DEBIAN_ARCHIVE_DATE`: The Debian snapshot repo that we want to use
|
||||||
|
* `GVISOR_ARCHIVE_DATE`: The gVisor APT repo that we want to use
|
||||||
|
* `H2ORESTART_CHECKSUM`: The SHA-256 checksum of the H2ORestart plugin
|
||||||
|
* `H2ORESTART_VERSION`: The version of the H2ORestart plugin
|
||||||
|
|
||||||
|
If you update these values in `Dockerfile.env`, you must also create a new
|
||||||
|
Dockerfile with:
|
||||||
|
|
||||||
|
```
|
||||||
|
make Dockerfile
|
||||||
|
```
|
||||||
|
|
||||||
|
Updating `Dockerfile` without bumping `Dockerfile.in` is detected and should
|
||||||
|
trigger a CI error.
|
||||||
|
|
||||||
|
### Reproducing the image
|
||||||
|
|
||||||
|
For a simple way to reproduce a Dangerzone container image, you can checkout the
|
||||||
|
commit this image was built from (you can find it from the image tag in its
|
||||||
|
`g<commit>` portion), and run the following command in a Linux environment:
|
||||||
|
|
||||||
|
```
|
||||||
|
./dev_scripts/reproduce-image.py --source <image>
|
||||||
|
```
|
||||||
|
|
||||||
|
This command will download the `diffoci` helper, build a container image from
|
||||||
|
the current Git commit, and ensure that the built image matches the source one,
|
||||||
|
with the exception of image names and file timestamps.
|
||||||
|
|
||||||
|
> [!TIP]
|
||||||
|
> If the source image is not pushed to a registry, and is local instead, you
|
||||||
|
> can prefix it with `docker://` or `podman://` accordingly, so that `diffoci`
|
||||||
|
> can load it from the local Docker / Podman container engine. For example:
|
||||||
|
>
|
||||||
|
> ```
|
||||||
|
> ./dev_scripts/reproduce-image.py --source podman://dangerzone.rocks/dangerzone:0.8.0-125-g725ce3b
|
||||||
|
> ```
|
16
dodo.py
16
dodo.py
|
@ -27,16 +27,6 @@ PARAM_APPLE_ID = {
|
||||||
"help": "The Apple developer ID that will be used to sign the .dmg",
|
"help": "The Apple developer ID that will be used to sign the .dmg",
|
||||||
}
|
}
|
||||||
|
|
||||||
PARAM_USE_CACHE = {
|
|
||||||
"name": "use_cache",
|
|
||||||
"long": "use-cache",
|
|
||||||
"help": (
|
|
||||||
"Whether to use cached results or not. For reproducibility reasons,"
|
|
||||||
" it's best to leave it to false"
|
|
||||||
),
|
|
||||||
"default": False,
|
|
||||||
}
|
|
||||||
|
|
||||||
### File dependencies
|
### File dependencies
|
||||||
#
|
#
|
||||||
# Define all the file dependencies for our tasks in a single place, since some file
|
# Define all the file dependencies for our tasks in a single place, since some file
|
||||||
|
@ -63,9 +53,8 @@ TESSDATA_TARGETS = list_language_data()
|
||||||
|
|
||||||
IMAGE_DEPS = [
|
IMAGE_DEPS = [
|
||||||
"Dockerfile",
|
"Dockerfile",
|
||||||
"poetry.lock",
|
|
||||||
*list_files("dangerzone/conversion"),
|
*list_files("dangerzone/conversion"),
|
||||||
"dangerzone/gvisor_wrapper/entrypoint.py",
|
*list_files("dangerzone/container_helpers"),
|
||||||
"install/common/build-image.py",
|
"install/common/build-image.py",
|
||||||
]
|
]
|
||||||
IMAGE_TARGETS = ["share/container.tar.gz", "share/image-id.txt"]
|
IMAGE_TARGETS = ["share/container.tar.gz", "share/image-id.txt"]
|
||||||
|
@ -206,11 +195,10 @@ def task_build_image():
|
||||||
|
|
||||||
return {
|
return {
|
||||||
"actions": [
|
"actions": [
|
||||||
f"python install/common/build-image.py --use-cache=%(use_cache)s --runtime={CONTAINER_RUNTIME}",
|
f"python install/common/build-image.py --runtime={CONTAINER_RUNTIME}",
|
||||||
["cp", img_src, img_dst],
|
["cp", img_src, img_dst],
|
||||||
["cp", img_id_src, img_id_dst],
|
["cp", img_id_src, img_id_dst],
|
||||||
],
|
],
|
||||||
"params": [PARAM_USE_CACHE],
|
|
||||||
"file_dep": IMAGE_DEPS,
|
"file_dep": IMAGE_DEPS,
|
||||||
"targets": [img_src, img_dst, img_id_src, img_id_dst],
|
"targets": [img_src, img_dst, img_id_src, img_id_dst],
|
||||||
"task_dep": ["init_release_dir", "check_container_runtime"],
|
"task_dep": ["init_release_dir", "check_container_runtime"],
|
||||||
|
|
|
@ -1,6 +1,5 @@
|
||||||
import argparse
|
import argparse
|
||||||
import gzip
|
import gzip
|
||||||
import os
|
|
||||||
import platform
|
import platform
|
||||||
import secrets
|
import secrets
|
||||||
import subprocess
|
import subprocess
|
||||||
|
@ -9,7 +8,6 @@ from pathlib import Path
|
||||||
|
|
||||||
BUILD_CONTEXT = "dangerzone/"
|
BUILD_CONTEXT = "dangerzone/"
|
||||||
IMAGE_NAME = "dangerzone.rocks/dangerzone"
|
IMAGE_NAME = "dangerzone.rocks/dangerzone"
|
||||||
REQUIREMENTS_TXT = "container-pip-requirements.txt"
|
|
||||||
if platform.system() in ["Darwin", "Windows"]:
|
if platform.system() in ["Darwin", "Windows"]:
|
||||||
CONTAINER_RUNTIME = "docker"
|
CONTAINER_RUNTIME = "docker"
|
||||||
elif platform.system() == "Linux":
|
elif platform.system() == "Linux":
|
||||||
|
@ -29,6 +27,29 @@ def str2bool(v):
|
||||||
raise argparse.ArgumentTypeError("Boolean value expected.")
|
raise argparse.ArgumentTypeError("Boolean value expected.")
|
||||||
|
|
||||||
|
|
||||||
|
def determine_git_tag():
    # Designate a unique tag for this image, depending on the Git commit it was created
    # from:
    # 1. If created from a Git tag (e.g., 0.8.0), the image tag will be `0.8.0`.
    # 2. If created from a commit, it will be something like `0.8.0-31-g6bdaa7a`.
    # 3. If the contents of the Git repo are dirty, we will append a unique identifier
    #    for this run, something like `0.8.0-31-g6bdaa7a-fdcb` or `0.8.0-fdcb`.
    dirty_ident = secrets.token_hex(2)
    cmd = [
        "git",
        "describe",
        "--long",
        "--first-parent",
        f"--dirty=-{dirty_ident}",
    ]
    described = subprocess.check_output(cmd).decode().strip()
    return described[1:]  # remove the "v" prefix of the tag.
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
parser = argparse.ArgumentParser()
|
parser = argparse.ArgumentParser()
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
|
@ -53,9 +74,14 @@ def main():
|
||||||
"--use-cache",
|
"--use-cache",
|
||||||
type=str2bool,
|
type=str2bool,
|
||||||
nargs="?",
|
nargs="?",
|
||||||
default=False,
|
default=True,
|
||||||
const=True,
|
const=True,
|
||||||
help="Use the builder's cache to speed up the builds (not suitable for release builds)",
|
help="Use the builder's cache to speed up the builds",
|
||||||
|
)
|
||||||
|
parser.add_argument(
|
||||||
|
"--tag",
|
||||||
|
default=None,
|
||||||
|
help="Provide a custom tag for the image (for development only)",
|
||||||
)
|
)
|
||||||
args = parser.parse_args()
|
args = parser.parse_args()
|
||||||
|
|
||||||
|
@ -64,39 +90,13 @@ def main():
|
||||||
|
|
||||||
print(f"Building for architecture '{ARCH}'")
|
print(f"Building for architecture '{ARCH}'")
|
||||||
|
|
||||||
# Designate a unique tag for this image, depending on the Git commit it was created
|
tag = args.tag or determine_git_tag()
|
||||||
# from:
|
|
||||||
# 1. If created from a Git tag (e.g., 0.8.0), the image tag will be `0.8.0`.
|
|
||||||
# 2. If created from a commit, it will be something like `0.8.0-31-g6bdaa7a`.
|
|
||||||
# 3. If the contents of the Git repo are dirty, we will append a unique identifier
|
|
||||||
# for this run, something like `0.8.0-31-g6bdaa7a-fdcb` or `0.8.0-fdcb`.
|
|
||||||
dirty_ident = secrets.token_hex(2)
|
|
||||||
tag = (
|
|
||||||
subprocess.check_output(
|
|
||||||
["git", "describe", "--long", "--first-parent", f"--dirty=-{dirty_ident}"],
|
|
||||||
)
|
|
||||||
.decode()
|
|
||||||
.strip()[1:] # remove the "v" prefix of the tag.
|
|
||||||
)
|
|
||||||
image_name_tagged = IMAGE_NAME + ":" + tag
|
image_name_tagged = IMAGE_NAME + ":" + tag
|
||||||
|
|
||||||
print(f"Will tag the container image as '{image_name_tagged}'")
|
print(f"Will tag the container image as '{image_name_tagged}'")
|
||||||
with open(image_id_path, "w") as f:
|
with open(image_id_path, "w") as f:
|
||||||
f.write(tag)
|
f.write(tag)
|
||||||
|
|
||||||
print("Exporting container pip dependencies")
|
|
||||||
with ContainerPipDependencies():
|
|
||||||
if not args.use_cache:
|
|
||||||
print("Pulling base image")
|
|
||||||
subprocess.run(
|
|
||||||
[
|
|
||||||
args.runtime,
|
|
||||||
"pull",
|
|
||||||
"alpine:latest",
|
|
||||||
],
|
|
||||||
check=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Build the container image, and tag it with the calculated tag
|
# Build the container image, and tag it with the calculated tag
|
||||||
print("Building container image")
|
print("Building container image")
|
||||||
cache_args = [] if args.use_cache else ["--no-cache"]
|
cache_args = [] if args.use_cache else ["--no-cache"]
|
||||||
|
@ -106,10 +106,6 @@ def main():
|
||||||
"build",
|
"build",
|
||||||
BUILD_CONTEXT,
|
BUILD_CONTEXT,
|
||||||
*cache_args,
|
*cache_args,
|
||||||
"--build-arg",
|
|
||||||
f"REQUIREMENTS_TXT={REQUIREMENTS_TXT}",
|
|
||||||
"--build-arg",
|
|
||||||
f"ARCH={ARCH}",
|
|
||||||
"-f",
|
"-f",
|
||||||
"Dockerfile",
|
"Dockerfile",
|
||||||
"--tag",
|
"--tag",
|
||||||
|
@ -145,31 +141,5 @@ def main():
|
||||||
cmd.wait(5)
|
cmd.wait(5)
|
||||||
|
|
||||||
|
|
||||||
class ContainerPipDependencies:
|
|
||||||
"""Generates PIP dependencies within container"""
|
|
||||||
|
|
||||||
def __enter__(self):
|
|
||||||
try:
|
|
||||||
container_requirements_txt = subprocess.check_output(
|
|
||||||
["poetry", "export", "--only", "container"], universal_newlines=True
|
|
||||||
)
|
|
||||||
except subprocess.CalledProcessError as e:
|
|
||||||
print("FAILURE", e.returncode, e.output)
|
|
||||||
print(f"REQUIREMENTS: {container_requirements_txt}")
|
|
||||||
# XXX Export container dependencies and exclude pymupdfb since it is not needed in container
|
|
||||||
req_txt_pymupdfb_stripped = container_requirements_txt.split("pymupdfb")[0]
|
|
||||||
with open(Path(BUILD_CONTEXT) / REQUIREMENTS_TXT, "w") as f:
|
|
||||||
if ARCH == "arm64":
|
|
||||||
# PyMuPDF needs to be built on ARM64 machines
|
|
||||||
# But is already provided as a prebuilt-wheel on other architectures
|
|
||||||
f.write(req_txt_pymupdfb_stripped)
|
|
||||||
else:
|
|
||||||
f.write(container_requirements_txt)
|
|
||||||
|
|
||||||
def __exit__(self, exc_type, exc_value, exc_tb):
|
|
||||||
print("Leaving the context...")
|
|
||||||
os.remove(Path(BUILD_CONTEXT) / REQUIREMENTS_TXT)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
sys.exit(main())
|
sys.exit(main())
|
||||||
|
|
|
@ -216,16 +216,6 @@ convert the documents within a secure sandbox.
|
||||||
%prep
|
%prep
|
||||||
%autosetup -p1 -n dangerzone-%{version}
|
%autosetup -p1 -n dangerzone-%{version}
|
||||||
|
|
||||||
# XXX: Bump the Python requirement in pyproject.toml from <3.13 to <3.14. Fedora
|
|
||||||
# 41 comes with Python 3.13 installed, but our pyproject.toml does not support
|
|
||||||
# it because PySide6 in PyPI works with Python 3.12 or earlier.
|
|
||||||
#
|
|
||||||
# This hack sidesteps this issue, and we haven't noticed any particular problem
|
|
||||||
# with the package that is built from that.
|
|
||||||
%if 0%{?fedora} == 41
|
|
||||||
sed -i 's/<3.13/<3.14/' pyproject.toml
|
|
||||||
%endif
|
|
||||||
|
|
||||||
# Bypass the version pin for Fedora as the 6.8.1.1 package is causing trouble
|
# Bypass the version pin for Fedora as the 6.8.1.1 package is causing trouble
|
||||||
# A 6.8.1.1 package was only released with a wheel for macOS, but was picked by
|
# A 6.8.1.1 package was only released with a wheel for macOS, but was picked by
|
||||||
# Fedora packagers. We cannot use "*" when PyPI is involved as it will fail to download the latest version.
|
# Fedora packagers. We cannot use "*" when PyPI is involved as it will fail to download the latest version.
|
||||||
|
|
|
@ -28,7 +28,7 @@ def main():
|
||||||
)
|
)
|
||||||
|
|
||||||
logger.info("Getting PyMuPDF deps as requirements.txt")
|
logger.info("Getting PyMuPDF deps as requirements.txt")
|
||||||
cmd = ["poetry", "export", "--only", "container"]
|
cmd = ["poetry", "export", "--only", "debian"]
|
||||||
container_requirements_txt = subprocess.check_output(cmd)
|
container_requirements_txt = subprocess.check_output(cmd)
|
||||||
|
|
||||||
# XXX: Hack for Ubuntu Focal.
|
# XXX: Hack for Ubuntu Focal.
|
149
poetry.lock
generated
149
poetry.lock
generated
|
@ -480,6 +480,43 @@ files = [
|
||||||
{file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
|
{file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"},
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "jinja2"
|
||||||
|
version = "3.1.5"
|
||||||
|
description = "A very fast and expressive template engine."
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.7"
|
||||||
|
files = [
|
||||||
|
{file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"},
|
||||||
|
{file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
MarkupSafe = ">=2.0"
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
i18n = ["Babel (>=2.7)"]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "jinja2-cli"
|
||||||
|
version = "0.8.2"
|
||||||
|
description = "A CLI interface to Jinja2"
|
||||||
|
optional = false
|
||||||
|
python-versions = "*"
|
||||||
|
files = [
|
||||||
|
{file = "jinja2-cli-0.8.2.tar.gz", hash = "sha256:a16bb1454111128e206f568c95938cdef5b5a139929378f72bb8cf6179e18e50"},
|
||||||
|
{file = "jinja2_cli-0.8.2-py2.py3-none-any.whl", hash = "sha256:b91715c79496beaddad790171e7258a87db21c1a0b6d2b15bca3ba44b74aac5d"},
|
||||||
|
]
|
||||||
|
|
||||||
|
[package.dependencies]
|
||||||
|
jinja2 = "*"
|
||||||
|
|
||||||
|
[package.extras]
|
||||||
|
tests = ["flake8", "jinja2", "pytest"]
|
||||||
|
toml = ["jinja2", "toml"]
|
||||||
|
xml = ["jinja2", "xmltodict"]
|
||||||
|
yaml = ["jinja2", "pyyaml"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "lief"
|
name = "lief"
|
||||||
version = "0.16.2"
|
version = "0.16.2"
|
||||||
|
@ -563,6 +600,76 @@ importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""}
|
||||||
docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"]
|
docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"]
|
||||||
testing = ["coverage", "pyyaml"]
|
testing = ["coverage", "pyyaml"]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "markupsafe"
|
||||||
|
version = "3.0.2"
|
||||||
|
description = "Safely add untrusted strings to HTML/XML markup."
|
||||||
|
optional = false
|
||||||
|
python-versions = ">=3.9"
|
||||||
|
files = [
|
||||||
|
{file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"},
|
||||||
|
{file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"},
|
||||||
|
{file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"},
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "mypy"
|
name = "mypy"
|
||||||
version = "1.14.1"
|
version = "1.14.1"
|
||||||
|
@ -1005,29 +1112,29 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "ruff"
|
name = "ruff"
|
||||||
version = "0.9.2"
|
version = "0.9.3"
|
||||||
description = "An extremely fast Python linter and code formatter, written in Rust."
|
description = "An extremely fast Python linter and code formatter, written in Rust."
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.7"
|
python-versions = ">=3.7"
|
||||||
files = [
|
files = [
|
||||||
{file = "ruff-0.9.2-py3-none-linux_armv6l.whl", hash = "sha256:80605a039ba1454d002b32139e4970becf84b5fee3a3c3bf1c2af6f61a784347"},
|
{file = "ruff-0.9.3-py3-none-linux_armv6l.whl", hash = "sha256:7f39b879064c7d9670197d91124a75d118d00b0990586549949aae80cdc16624"},
|
||||||
{file = "ruff-0.9.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b9aab82bb20afd5f596527045c01e6ae25a718ff1784cb92947bff1f83068b00"},
|
{file = "ruff-0.9.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:a187171e7c09efa4b4cc30ee5d0d55a8d6c5311b3e1b74ac5cb96cc89bafc43c"},
|
||||||
{file = "ruff-0.9.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fbd337bac1cfa96be615f6efcd4bc4d077edbc127ef30e2b8ba2a27e18c054d4"},
|
{file = "ruff-0.9.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c59ab92f8e92d6725b7ded9d4a31be3ef42688a115c6d3da9457a5bda140e2b4"},
|
||||||
{file = "ruff-0.9.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82b35259b0cbf8daa22a498018e300b9bb0174c2bbb7bcba593935158a78054d"},
|
{file = "ruff-0.9.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dc153c25e715be41bb228bc651c1e9b1a88d5c6e5ed0194fa0dfea02b026439"},
|
||||||
{file = "ruff-0.9.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b6a9701d1e371bf41dca22015c3f89769da7576884d2add7317ec1ec8cb9c3c"},
|
{file = "ruff-0.9.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:646909a1e25e0dc28fbc529eab8eb7bb583079628e8cbe738192853dbbe43af5"},
|
||||||
{file = "ruff-0.9.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9cc53e68b3c5ae41e8faf83a3b89f4a5d7b2cb666dff4b366bb86ed2a85b481f"},
|
{file = "ruff-0.9.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a5a46e09355695fbdbb30ed9889d6cf1c61b77b700a9fafc21b41f097bfbba4"},
|
||||||
{file = "ruff-0.9.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:8efd9da7a1ee314b910da155ca7e8953094a7c10d0c0a39bfde3fcfd2a015684"},
|
{file = "ruff-0.9.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c4bb09d2bbb394e3730d0918c00276e79b2de70ec2a5231cd4ebb51a57df9ba1"},
|
||||||
{file = "ruff-0.9.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3292c5a22ea9a5f9a185e2d131dc7f98f8534a32fb6d2ee7b9944569239c648d"},
|
{file = "ruff-0.9.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96a87ec31dc1044d8c2da2ebbed1c456d9b561e7d087734336518181b26b3aa5"},
|
||||||
{file = "ruff-0.9.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a605fdcf6e8b2d39f9436d343d1f0ff70c365a1e681546de0104bef81ce88df"},
|
{file = "ruff-0.9.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bb7554aca6f842645022fe2d301c264e6925baa708b392867b7a62645304df4"},
|
||||||
{file = "ruff-0.9.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c547f7f256aa366834829a08375c297fa63386cbe5f1459efaf174086b564247"},
|
{file = "ruff-0.9.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cabc332b7075a914ecea912cd1f3d4370489c8018f2c945a30bcc934e3bc06a6"},
|
||||||
{file = "ruff-0.9.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d18bba3d3353ed916e882521bc3e0af403949dbada344c20c16ea78f47af965e"},
|
{file = "ruff-0.9.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:33866c3cc2a575cbd546f2cd02bdd466fed65118e4365ee538a3deffd6fcb730"},
|
||||||
{file = "ruff-0.9.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b338edc4610142355ccf6b87bd356729b62bf1bc152a2fad5b0c7dc04af77bfe"},
|
{file = "ruff-0.9.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:006e5de2621304c8810bcd2ee101587712fa93b4f955ed0985907a36c427e0c2"},
|
||||||
{file = "ruff-0.9.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:492a5e44ad9b22a0ea98cf72e40305cbdaf27fac0d927f8bc9e1df316dcc96eb"},
|
{file = "ruff-0.9.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:ba6eea4459dbd6b1be4e6bfc766079fb9b8dd2e5a35aff6baee4d9b1514ea519"},
|
||||||
{file = "ruff-0.9.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:af1e9e9fe7b1f767264d26b1075ac4ad831c7db976911fa362d09b2d0356426a"},
|
{file = "ruff-0.9.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:90230a6b8055ad47d3325e9ee8f8a9ae7e273078a66401ac66df68943ced029b"},
|
||||||
{file = "ruff-0.9.2-py3-none-win32.whl", hash = "sha256:71cbe22e178c5da20e1514e1e01029c73dc09288a8028a5d3446e6bba87a5145"},
|
{file = "ruff-0.9.3-py3-none-win32.whl", hash = "sha256:eabe5eb2c19a42f4808c03b82bd313fc84d4e395133fb3fc1b1516170a31213c"},
|
||||||
{file = "ruff-0.9.2-py3-none-win_amd64.whl", hash = "sha256:c5e1d6abc798419cf46eed03f54f2e0c3adb1ad4b801119dedf23fcaf69b55b5"},
|
{file = "ruff-0.9.3-py3-none-win_amd64.whl", hash = "sha256:040ceb7f20791dfa0e78b4230ee9dce23da3b64dd5848e40e3bf3ab76468dcf4"},
|
||||||
{file = "ruff-0.9.2-py3-none-win_arm64.whl", hash = "sha256:a1b63fa24149918f8b37cef2ee6fff81f24f0d74b6f0bdc37bc3e1f2143e41c6"},
|
{file = "ruff-0.9.3-py3-none-win_arm64.whl", hash = "sha256:800d773f6d4d33b0a3c60e2c6ae8f4c202ea2de056365acfa519aa48acf28e0b"},
|
||||||
{file = "ruff-0.9.2.tar.gz", hash = "sha256:b5eceb334d55fae5f316f783437392642ae18e16dcf4f1858d55d3c2a0f8f5d0"},
|
{file = "ruff-0.9.3.tar.gz", hash = "sha256:8293f89985a090ebc3ed1064df31f3b4b56320cdfcec8b60d3295bddb955c22a"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
|
@ -1269,5 +1376,5 @@ type = ["pytest-mypy"]
|
||||||
|
|
||||||
[metadata]
|
[metadata]
|
||||||
lock-version = "2.0"
|
lock-version = "2.0"
|
||||||
python-versions = ">=3.9,<3.13"
|
python-versions = ">=3.9,<3.14"
|
||||||
content-hash = "30751b4a27cebd020b5222de2abef4e38fb5f6676fdf936693060e42cd35c25f"
|
content-hash = "c6395d63523761d272dfc5fe6eef7822a081b3b0fb0b739a82efec7de5346d57"
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
[tool.poetry]
|
[tool.poetry]
|
||||||
name = "dangerzone"
|
name = "dangerzone"
|
||||||
version = "0.8.1"
|
version = "0.8.1"
|
||||||
description = "Take potentially dangerous PDFs, office documents, or images and convert them to safe PDFs"
|
description = "Take potentially dangerous PDFs, office documents, or images and convert them to safe PDFs"
|
||||||
|
@ -13,7 +13,7 @@ include = [
|
||||||
]
|
]
|
||||||
|
|
||||||
[tool.poetry.dependencies]
|
[tool.poetry.dependencies]
|
||||||
python = ">=3.9,<3.13"
|
python = ">=3.9,<3.14"
|
||||||
click = "*"
|
click = "*"
|
||||||
platformdirs = "*"
|
platformdirs = "*"
|
||||||
PySide6 = "^6.7.1"
|
PySide6 = "^6.7.1"
|
||||||
|
@ -42,6 +42,7 @@ cx_freeze = {version = "^7.2.5", platform = "win32"}
|
||||||
pywin32 = {version = "*", platform = "win32"}
|
pywin32 = {version = "*", platform = "win32"}
|
||||||
pyinstaller = {version = "*", platform = "darwin"}
|
pyinstaller = {version = "*", platform = "darwin"}
|
||||||
doit = "^0.36.0"
|
doit = "^0.36.0"
|
||||||
|
jinja2-cli = "^0.8.2"
|
||||||
|
|
||||||
# Dependencies required for linting the code.
|
# Dependencies required for linting the code.
|
||||||
[tool.poetry.group.lint.dependencies]
|
[tool.poetry.group.lint.dependencies]
|
||||||
|
@ -64,7 +65,7 @@ strip-ansi = "*"
|
||||||
pytest-subprocess = "^1.5.2"
|
pytest-subprocess = "^1.5.2"
|
||||||
pytest-rerunfailures = "^14.0"
|
pytest-rerunfailures = "^14.0"
|
||||||
|
|
||||||
[tool.poetry.group.container.dependencies]
|
[tool.poetry.group.debian.dependencies]
|
||||||
pymupdf = "1.24.11" # Last version to support python 3.8 (needed for Ubuntu Focal support)
|
pymupdf = "1.24.11" # Last version to support python 3.8 (needed for Ubuntu Focal support)
|
||||||
|
|
||||||
[tool.poetry.group.dev.dependencies]
|
[tool.poetry.group.dev.dependencies]
|
||||||
|
@ -73,11 +74,6 @@ httpx = "^0.27.2"
|
||||||
[tool.doit]
|
[tool.doit]
|
||||||
verbosity = 3
|
verbosity = 3
|
||||||
|
|
||||||
[tool.doit.tasks.build_image]
|
|
||||||
# DO NOT change this to 'true' for release artifacts, else we risk building
|
|
||||||
# images that are a few days behind. See also: docs/developer/doit.md
|
|
||||||
use_cache = false
|
|
||||||
|
|
||||||
[tool.ruff.lint]
|
[tool.ruff.lint]
|
||||||
select = [
|
select = [
|
||||||
# isort
|
# isort
|
||||||
|
|
Loading…
Reference in a new issue