Mirror of https://github.com/freedomofpress/dangerzone.git (synced 2025-04-28 18:02:38 +02:00)
Fix references to container.tar.gz
Find all references to the `container.tar.gz` file and replace them with references to `container.tar`. Also, remove the `--no-save` argument of `build-image.py`, since we now always save the image. Finally, fix some stale references to Poetry, which are no longer necessary.
parent 69234507c4
commit 51f432be6b
12 changed files with 72 additions and 89 deletions
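
The gist of the change, before the per-file diffs: the container image is now loaded straight from the uncompressed `share/container.tar` instead of being piped through `gunzip`. A minimal sketch of that load step (illustrative only; it assumes `podman` is on `PATH` and is not the project's own helper, which is the `load_image_tarball()` rewritten further down):

```python
import subprocess

def load_container_tarball(path: str = "share/container.tar") -> None:
    # Hand the uncompressed tarball directly to the container runtime,
    # mirroring `podman load` / `docker load -i share/container.tar` in the
    # updated workflows below. No gunzip step is needed anymore.
    with open(path, "rb") as tarball:
        subprocess.run(
            ["podman", "load"],
            stdin=tarball,
            capture_output=True,
            check=True,
        )
```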

.github/workflows/build.yml (vendored, 13 changes)

@@ -87,19 +87,12 @@ jobs:
         id: cache-container-image
         uses: actions/cache@v4
         with:
-          key: v4-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }}
+          key: v5-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }}
           path: |
-            share/container.tar.gz
+            share/container.tar
             share/image-id.txt

-      - name: Build and push Dangerzone image
+      - name: Build Dangerzone image
         if: ${{ steps.cache-container-image.outputs.cache-hit != 'true' }}
         run: |
-          sudo apt-get install -y python3-poetry
           python3 ./install/common/build-image.py
-          echo ${{ github.token }} | podman login ghcr.io -u USERNAME --password-stdin
-          gunzip -c share/container.tar.gz | podman load
-          tag=$(cat share/image-id.txt)
-          podman push \
-            dangerzone.rocks/dangerzone:$tag \
-            ${{ env.IMAGE_REGISTRY }}/dangerzone/dangerzone:tag

.github/workflows/ci.yml (vendored, 20 changes)

@@ -59,9 +59,9 @@ jobs:
         id: cache-container-image
         uses: actions/cache@v4
         with:
-          key: v4-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }}
+          key: v5-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }}
           path: |-
-            share/container.tar.gz
+            share/container.tar
             share/image-id.txt

       - name: Build Dangerzone container image
@@ -72,8 +72,8 @@ jobs:
       - name: Upload container image
         uses: actions/upload-artifact@v4
         with:
-          name: container.tar.gz
-          path: share/container.tar.gz
+          name: container.tar
+          path: share/container.tar

   download-tessdata:
     name: Download and cache Tesseract data
@@ -226,9 +226,9 @@ jobs:
       - name: Restore container cache
         uses: actions/cache/restore@v4
         with:
-          key: v4-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }}
+          key: v5-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }}
           path: |-
-            share/container.tar.gz
+            share/container.tar
             share/image-id.txt
           fail-on-cache-miss: true

@@ -333,9 +333,9 @@ jobs:
       - name: Restore container image
         uses: actions/cache/restore@v4
         with:
-          key: v4-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }}
+          key: v5-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }}
           path: |-
-            share/container.tar.gz
+            share/container.tar
             share/image-id.txt
           fail-on-cache-miss: true

@@ -430,9 +430,9 @@ jobs:
      - name: Restore container image
         uses: actions/cache/restore@v4
         with:
-          key: v4-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }}
+          key: v5-${{ steps.date.outputs.date }}-${{ hashFiles('Dockerfile', 'dangerzone/conversion/*.py', 'dangerzone/container_helpers/*', 'install/common/build-image.py') }}
           path: |-
-            share/container.tar.gz
+            share/container.tar
             share/image-id.txt
           fail-on-cache-miss: true

.github/workflows/scan.yml (vendored, 17 changes)

@@ -21,19 +21,12 @@ jobs:
         uses: actions/checkout@v4
         with:
           fetch-depth: 0
-      - name: Install container build dependencies
-        run: |
-          sudo apt install pipx
-          pipx install poetry
-          pipx inject poetry poetry-plugin-export
-          poetry install --only package
-      - name: Bump date of Debian snapshot archive
-        run: |
-          date=$(date "+%Y%m%d")
-          sed -i "s/DEBIAN_ARCHIVE_DATE=[0-9]\+/DEBIAN_ARCHIVE_DATE=${date}/" Dockerfile.env
-          make Dockerfile
       - name: Build container image
-        run: python3 ./install/common/build-image.py --runtime docker --no-save
+        run: |
+          python3 ./install/common/build-image.py \
+            --debian-archive-date $(date "+%Y%m%d") \
+            --runtime docker
+          docker load -i share/container.tar
       - name: Get image tag
         id: tag
         run: echo "tag=$(cat share/image-id.txt)" >> $GITHUB_OUTPUT

@@ -269,7 +269,7 @@ Our [GitHub Releases page](https://github.com/freedomofpress/dangerzone/releases
 hosts the following files:
 * Windows installer (`Dangerzone-<version>.msi`)
 * macOS archives (`Dangerzone-<version>-<arch>.dmg`)
-* Container images (`container-<version>-<arch>.tar.gz`)
+* Container images (`container-<version>-<arch>.tar`)
 * Source package (`dangerzone-<version>.tar.gz`)

 All these files are accompanied by signatures (as `.asc` files). We'll explain
@@ -297,7 +297,7 @@ gpg --verify Dangerzone-0.6.1-i686.dmg.asc Dangerzone-0.6.1-i686.dmg
 For the container images:

 ```
-gpg --verify container-0.6.1-i686.tar.gz.asc container-0.6.1-i686.tar.gz
+gpg --verify container-0.6.1-i686.tar.asc container-0.6.1-i686.tar
 ```

 For the source package:

@@ -150,7 +150,7 @@ Here is what you need to do:
     poetry run ./install/common/download-tessdata.py

     # Copy the container image to the assets folder
-    cp share/container.tar.gz ~dz/release-assets/$VERSION/dangerzone-$VERSION-arm64.tar.gz
+    cp share/container.tar ~dz/release-assets/$VERSION/dangerzone-$VERSION-arm64.tar
     cp share/image-id.txt ~dz/release-assets/$VERSION/.
     ```

@@ -227,7 +227,7 @@ The Windows release is performed in a Windows 11 virtual machine (as opposed to

 - [ ] Copy the container image into the VM
   > [!IMPORTANT]
-  > Instead of running `python .\install\windows\build-image.py` in the VM, run the build image script on the host (making sure to build for `linux/amd64`). Copy `share/container.tar.gz` and `share/image-id.txt` from the host into the `share` folder in the VM.
+  > Instead of running `python .\install\windows\build-image.py` in the VM, run the build image script on the host (making sure to build for `linux/amd64`). Copy `share/container.tar` and `share/image-id.txt` from the host into the `share` folder in the VM.
 - [ ] Run `poetry run .\install\windows\build-app.bat`
 - [ ] When you're done you will have `dist\Dangerzone.msi`

@@ -318,9 +318,8 @@ To publish the release, you can follow these steps:

 - [ ] Run container scan on the produced container images (some time may have passed since the artifacts were built)
   ```bash
-  gunzip --keep -c ./share/container.tar.gz > /tmp/container.tar
   docker pull anchore/grype:latest
-  docker run --rm -v /tmp/container.tar:/container.tar anchore/grype:latest /container.tar
+  docker run --rm -v ./share/container.tar:/container.tar anchore/grype:latest /container.tar
   ```

 - [ ] Collect the assets in a single directory, calculate their SHA-256 hashes, and sign them.

@@ -1,4 +1,3 @@
-import gzip
 import logging
 import platform
 import shutil
@@ -120,30 +119,23 @@ def get_expected_tag() -> str:

 def load_image_tarball() -> None:
     log.info("Installing Dangerzone container image...")
-    p = subprocess.Popen(
-        [get_runtime(), "load"],
-        stdin=subprocess.PIPE,
-        startupinfo=get_subprocess_startupinfo(),
-    )
-
-    chunk_size = 4 << 20
-    compressed_container_path = get_resource_path("container.tar.gz")
-    with gzip.open(compressed_container_path) as f:
-        while True:
-            chunk = f.read(chunk_size)
-            if len(chunk) > 0:
-                if p.stdin:
-                    p.stdin.write(chunk)
-            else:
-                break
-    _, err = p.communicate()
-    if p.returncode < 0:
-        if err:
-            error = err.decode()
-        else:
-            error = "No output"
-        raise errors.ImageInstallationException(
-            f"Could not install container image: {error}"
-        )
+    tarball_path = get_resource_path("container.tar")
+    with open(tarball_path) as f:
+        try:
+            subprocess.run(
+                [get_runtime(), "load"],
+                stdin=f,
+                startupinfo=get_subprocess_startupinfo(),
+                capture_output=True,
+                check=True,
+            )
+        except subprocess.CalledProcessError as e:
+            if e.stderr:
+                error = e.stderr.decode()
+            else:
+                error = "No output"
+            raise errors.ImageInstallationException(
+                f"Could not install container image: {error}"
+            )

-    log.info("Successfully installed container image from")
+    log.info("Successfully installed container image")
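
For callers, only the filename changes: the rewritten loader still raises `errors.ImageInstallationException` when the runtime's `load` command fails. A short usage sketch (assuming the function lives in `dangerzone.container_utils`, as the test imports further down suggest; the wrapper itself is hypothetical):

```python
from dangerzone import container_utils, errors

def ensure_image_installed() -> bool:
    # Illustrative wrapper: try to install the image from container.tar and
    # report success or failure instead of letting the exception propagate.
    try:
        container_utils.load_image_tarball()
    except errors.ImageInstallationException as e:
        print(f"Image installation failed: {e}")
        return False
    return True
```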

@@ -130,7 +130,7 @@ def is_qubes_native_conversion() -> bool:
         # This disambiguates if it is running a Qubes targetted build or not
         # (Qubes-specific builds don't ship the container image)

-        compressed_container_path = get_resource_path("container.tar.gz")
-        return not os.path.exists(compressed_container_path)
+        container_image_path = get_resource_path("container.tar")
+        return not os.path.exists(container_image_path)
     else:
         return False

@@ -11,8 +11,8 @@ log = logging.getLogger(__name__)


 DZ_ASSETS = [
-    "container-{version}-i686.tar.gz",
-    "container-{version}-arm64.tar.gz",
+    "container-{version}-i686.tar",
+    "container-{version}-arm64.tar",
     "Dangerzone-{version}.msi",
     "Dangerzone-{version}-arm64.dmg",
     "Dangerzone-{version}-i686.dmg",

dodo.py (6 changes)

@@ -57,7 +57,7 @@ IMAGE_DEPS = [
     *list_files("dangerzone/container_helpers"),
     "install/common/build-image.py",
 ]
-IMAGE_TARGETS = ["share/container.tar.gz", "share/image-id.txt"]
+IMAGE_TARGETS = ["share/container.tar", "share/image-id.txt"]

 SOURCE_DEPS = [
     *list_files("assets"),
@@ -188,8 +188,8 @@ def task_download_tessdata():

 def task_build_image():
     """Build the container image using ./install/common/build-image.py"""
-    img_src = "share/container.tar.gz"
-    img_dst = RELEASE_DIR / f"container-{VERSION}-{ARCH}.tar.gz"  # FIXME: Add arch
+    img_src = "share/container.tar"
+    img_dst = RELEASE_DIR / f"container-{VERSION}-{ARCH}.tar"  # FIXME: Add arch
     img_id_src = "share/image-id.txt"
     img_id_dst = RELEASE_DIR / "image-id.txt"  # FIXME: Add arch

@@ -66,14 +66,14 @@ def build(build_dir, qubes=False):
     print("* Creating a Python sdist")
     tessdata = root / "share" / "tessdata"
     tessdata_bak = root / "tessdata.bak"
-    container_tar_gz = root / "share" / "container.tar.gz"
-    container_tar_gz_bak = root / "container.tar.gz.bak"
+    container_tar = root / "share" / "container.tar"
+    container_tar_bak = root / "container.tar.bak"

     if tessdata.exists():
         tessdata.rename(tessdata_bak)
-    stash_container = qubes and container_tar_gz.exists()
-    if stash_container and container_tar_gz.exists():
-        container_tar_gz.rename(container_tar_gz_bak)
+    stash_container = qubes and container_tar.exists()
+    if stash_container and container_tar.exists():
+        container_tar.rename(container_tar_bak)
     try:
         subprocess.run(["poetry", "build", "-f", "sdist"], cwd=root, check=True)
         # Copy and unlink the Dangerzone sdist, instead of just renaming it. If the
@@ -84,8 +84,8 @@ def build(build_dir, qubes=False):
     finally:
         if tessdata_bak.exists():
             tessdata_bak.rename(tessdata)
-        if stash_container and container_tar_gz_bak.exists():
-            container_tar_gz_bak.rename(container_tar_gz)
+        if stash_container and container_tar_bak.exists():
+            container_tar_bak.rename(container_tar)

     print("* Building RPM package")
     cmd = [

@@ -18,7 +18,7 @@
 #
 # * Qubes packages include some extra files under /etc/qubes-rpc, whereas
 #   regular RPM packages include the container image under
-#   /usr/share/container.tar.gz
+#   /usr/share/container.tar
 # * Qubes packages have some extra dependencies.
 # 3. It is best to consume this SPEC file using the `install/linux/build-rpm.py`
 #    script, which handles the necessary scaffolding for building the package.

@@ -8,6 +8,7 @@ from pytest_subprocess import FakeProcess
 from dangerzone import container_utils, errors
 from dangerzone.isolation_provider.container import Container
 from dangerzone.isolation_provider.qubes import is_qubes_native_conversion
+from dangerzone.util import get_resource_path

 from .base import IsolationProviderTermination, IsolationProviderTest

@@ -47,7 +48,7 @@ class TestContainer(IsolationProviderTest):
         provider.is_available()

     def test_install_raise_if_image_cant_be_installed(
-        self, mocker: MockerFixture, provider: Container, fp: FakeProcess
+        self, provider: Container, fp: FakeProcess
     ) -> None:
         """When an image installation fails, an exception should be raised"""

@@ -68,11 +69,13 @@ class TestContainer(IsolationProviderTest):
             occurrences=2,
         )

-        # Make podman load fail
-        mocker.patch("gzip.open", mocker.mock_open(read_data=""))
-
         fp.register_subprocess(
-            [container_utils.get_runtime(), "load"],
+            [
+                container_utils.get_runtime(),
+                "load",
+                "-i",
+                get_resource_path("container.tar"),
+            ],
             returncode=-1,
         )

@@ -80,7 +83,7 @@ class TestContainer(IsolationProviderTest):
             provider.install()

     def test_install_raises_if_still_not_installed(
-        self, mocker: MockerFixture, provider: Container, fp: FakeProcess
+        self, provider: Container, fp: FakeProcess
     ) -> None:
         """When an image keep being not installed, it should return False"""

@@ -101,10 +104,13 @@ class TestContainer(IsolationProviderTest):
             occurrences=2,
         )

-        # Patch gzip.open and podman load so that it works
-        mocker.patch("gzip.open", mocker.mock_open(read_data=""))
         fp.register_subprocess(
-            [container_utils.get_runtime(), "load"],
+            [
+                container_utils.get_runtime(),
+                "load",
+                "-i",
+                get_resource_path("container.tar"),
+            ],
         )
         with pytest.raises(errors.ImageNotPresentException):
             provider.install()
@@ -191,7 +197,7 @@ class TestContainer(IsolationProviderTest):
         reason="Linux specific",
     )
     def test_linux_skips_desktop_version_check_returns_true(
-        self, mocker: MockerFixture, provider: Container
+        self, provider: Container
     ) -> None:
         assert (True, "") == provider.check_docker_desktop_version()