diff --git a/.github/workflows/build-umu-fedora-40.yml b/.github/workflows/build-umu-fedora-40.yml index 0724a4926..a578aec47 100644 --- a/.github/workflows/build-umu-fedora-40.yml +++ b/.github/workflows/build-umu-fedora-40.yml @@ -25,7 +25,7 @@ jobs: run: git config --global --add safe.directory "$GITHUB_WORKSPACE" - name: Install build dependencies - run: dnf install -y rpm-build meson ninja-build cmake g++ gcc-c++ scdoc git python3-devel python3-build python3-installer python3-hatchling python python3 + run: dnf install -y rpm-build meson ninja-build cmake g++ gcc-c++ scdoc git python3-devel python3-build python3-installer python3-hatchling python python3 cargo - name: Build the project run: | diff --git a/.github/workflows/build-umu-fedora-41.yml b/.github/workflows/build-umu-fedora-41.yml index 5457ce32a..c27443450 100644 --- a/.github/workflows/build-umu-fedora-41.yml +++ b/.github/workflows/build-umu-fedora-41.yml @@ -25,7 +25,7 @@ jobs: run: git config --global --add safe.directory "$GITHUB_WORKSPACE" - name: Install build dependencies - run: dnf install -y rpm-build meson ninja-build cmake g++ gcc-c++ scdoc git python3-devel python3-build python3-installer python3-hatchling python python3 + run: dnf install -y rpm-build meson ninja-build cmake g++ gcc-c++ scdoc git python3-devel python3-build python3-installer python3-hatchling python python3 cargo - name: Build the project run: | diff --git a/.github/workflows/build-umu-zipapp.yml b/.github/workflows/build-umu-zipapp.yml index 3483dcef9..738b7c034 100644 --- a/.github/workflows/build-umu-zipapp.yml +++ b/.github/workflows/build-umu-zipapp.yml @@ -23,7 +23,7 @@ jobs: run: apt update -y - name: Install build dependencies - run: apt install -y python3-venv python3-all bash make scdoc python3-hatchling python3-installer python3-build + run: apt install -y python3-venv python3-all bash make scdoc python3-hatchling python3-installer python3-build cargo - name: Configure run: ./configure.sh --user-install diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml index 3cb74eafe..c9d66e7d3 100644 --- a/.github/workflows/e2e.yml +++ b/.github/workflows/e2e.yml @@ -11,7 +11,7 @@ permissions: jobs: build: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - uses: actions/checkout@v4 @@ -19,7 +19,7 @@ jobs: fetch-depth: 0 - name: Install dependencies run: | - sudo apt-get install meson scdoc python3-hatchling python3-build python3-installer python3-filelock shellcheck bubblewrap + sudo apt-get install meson scdoc python3-hatchling python3-build python3-installer python3-filelock shellcheck bubblewrap cargo python3 -m pip install --upgrade pip pip install uv - name: Initialize submodules diff --git a/.github/workflows/make.yml b/.github/workflows/make.yml index b87303654..806fb9c2d 100644 --- a/.github/workflows/make.yml +++ b/.github/workflows/make.yml @@ -19,7 +19,7 @@ jobs: fetch-depth: 0 - name: Install dependencies run: | - sudo apt-get install meson shellcheck scdoc python3-hatchling python3-build python3-installer python3-filelock + sudo apt-get install meson shellcheck scdoc python3-hatchling python3-build python3-installer python3-filelock cargo - name: Initialize submodules run: | git submodule update --init --recursive diff --git a/.gitignore b/.gitignore index 85ee0c028..45d88c655 100644 --- a/.gitignore +++ b/.gitignore @@ -6,3 +6,5 @@ umu_version.json umu_version.json.in.tmp Makefile /debian +umu_delta*.so +target diff --git a/Cargo.lock b/Cargo.lock new file mode 100644 index 000000000..fe8dc89f0 --- /dev/null 
+++ b/Cargo.lock @@ -0,0 +1,414 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 4 + +[[package]] +name = "autocfg" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" + +[[package]] +name = "base16ct" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" + +[[package]] +name = "base64ct" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" + +[[package]] +name = "block-buffer" +version = "0.10.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3078c7629b62d3f0439517fa394996acacc5cbc91c5a20d8c658e77abd503a71" +dependencies = [ + "generic-array", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "cipher" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" +dependencies = [ + "crypto-common", + "inout", +] + +[[package]] +name = "cpufeatures" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3" +dependencies = [ + "libc", +] + +[[package]] +name = "crypto-common" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" +dependencies = [ + "generic-array", + "typenum", +] + +[[package]] +name = "curve25519-dalek" +version = "4.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be" +dependencies = [ + "cfg-if", + "cpufeatures", + "curve25519-dalek-derive", + "digest", + "fiat-crypto", + "rustc_version", + "subtle", +] + +[[package]] +name = "curve25519-dalek-derive" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" +dependencies = [ + "proc-macro2", + "quote", + "syn", +] + +[[package]] +name = "digest" +version = "0.10.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" +dependencies = [ + "block-buffer", + "crypto-common", +] + +[[package]] +name = "ed25519" +version = "2.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53" +dependencies = [ + "signature", +] + +[[package]] +name = "ed25519-dalek" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a3daa8e81a3963a60642bcc1f90a670680bd4a77535faa384e9d1c79d620871" +dependencies = [ + "curve25519-dalek", + "ed25519", + "sha2", + "subtle", +] + +[[package]] +name = "fiat-crypto" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" + +[[package]] +name = "generic-array" +version = "0.14.7" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +dependencies = [ + "typenum", + "version_check", +] + +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + +[[package]] +name = "indoc" +version = "2.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b248f5224d1d606005e02c97f5aa4e88eeb230488bcc03bc9ca4d7991399f2b5" + +[[package]] +name = "inout" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0c10553d664a4d0bcff9f4215d0aac67a639cc68ef660840afe309b807bc9f5" +dependencies = [ + "generic-array", +] + +[[package]] +name = "libc" +version = "0.2.169" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" + +[[package]] +name = "memoffset" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "488016bfae457b036d996092f6cb448677611ce4449e970ceaf42695203f218a" +dependencies = [ + "autocfg", +] + +[[package]] +name = "once_cell" +version = "1.20.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" + +[[package]] +name = "pem-rfc7468" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88b39c9bfcfc231068454382784bb460aae594343fb030d46e9f50a645418412" +dependencies = [ + "base64ct", +] + +[[package]] +name = "portable-atomic" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "280dc24453071f1b63954171985a0b0d30058d287960968b9b2aca264c8d4ee6" + +[[package]] +name = "proc-macro2" +version = "1.0.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "37d3544b3f2748c54e147655edb5025752e2303145b5aefb3c3ea2c78b973bb0" +dependencies = [ + "unicode-ident", +] + +[[package]] +name = "pyo3" +version = "0.23.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e484fd2c8b4cb67ab05a318f1fd6fa8f199fcc30819f08f07d200809dba26c15" +dependencies = [ + "cfg-if", + "indoc", + "libc", + "memoffset", + "once_cell", + "portable-atomic", + "pyo3-build-config", + "pyo3-ffi", + "pyo3-macros", + "unindent", +] + +[[package]] +name = "pyo3-build-config" +version = "0.23.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc0e0469a84f208e20044b98965e1561028180219e35352a2afaf2b942beff3b" +dependencies = [ + "once_cell", + "target-lexicon", +] + +[[package]] +name = "pyo3-ffi" +version = "0.23.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb1547a7f9966f6f1a0f0227564a9945fe36b90da5a93b3933fc3dc03fae372d" +dependencies = [ + "libc", + "pyo3-build-config", +] + +[[package]] +name = "pyo3-macros" +version = "0.23.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdb6da8ec6fa5cedd1626c886fc8749bdcbb09424a86461eb8cdf096b7c33257" +dependencies = [ + "proc-macro2", + "pyo3-macros-backend", + "quote", + "syn", +] + +[[package]] +name = "pyo3-macros-backend" +version = "0.23.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38a385202ff5a92791168b1136afae5059d3ac118457bb7bc304c197c2d33e7d" +dependencies = [ + "heck", + 
"proc-macro2", + "pyo3-build-config", + "quote", + "syn", +] + +[[package]] +name = "quote" +version = "1.0.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0e4dccaaaf89514f546c693ddc140f729f958c247918a13380cccc6078391acc" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "rand_core" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" + +[[package]] +name = "rustc_version" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" +dependencies = [ + "semver", +] + +[[package]] +name = "semver" +version = "1.0.24" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3cb6eb87a131f756572d7fb904f6e7b68633f09cca868c5df1c4b8d1a694bbba" + +[[package]] +name = "sha2" +version = "0.10.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +dependencies = [ + "cfg-if", + "cpufeatures", + "digest", +] + +[[package]] +name = "signature" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" + +[[package]] +name = "ssh-cipher" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "caac132742f0d33c3af65bfcde7f6aa8f62f0e991d80db99149eb9d44708784f" +dependencies = [ + "cipher", + "ssh-encoding", +] + +[[package]] +name = "ssh-encoding" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb9242b9ef4108a78e8cd1a2c98e193ef372437f8c22be363075233321dd4a15" +dependencies = [ + "base64ct", + "pem-rfc7468", + "sha2", +] + +[[package]] +name = "ssh-key" +version = "0.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b86f5297f0f04d08cabaa0f6bff7cb6aec4d9c3b49d87990d63da9d9156a8c3" +dependencies = [ + "ed25519-dalek", + "rand_core", + "sha2", + "signature", + "ssh-cipher", + "ssh-encoding", + "subtle", + "zeroize", +] + +[[package]] +name = "subtle" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" + +[[package]] +name = "syn" +version = "2.0.94" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "987bc0be1cdea8b10216bd06e2ca407d40b9543468fafd3ddfb02f36e77f71f3" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "target-lexicon" +version = "0.12.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "61c41af27dd6d1e27b1b16b489db798443478cef1f06a660c96db617ba5de3b1" + +[[package]] +name = "typenum" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" + +[[package]] +name = "umu" +version = "0.1.0" +dependencies = [ + "base16ct", + "pyo3", + "sha2", + "ssh-key", +] + +[[package]] +name = "unicode-ident" +version = "1.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" + +[[package]] +name = "unindent" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"c7de7d73e1754487cb58364ee906a499937a0dfabd86bcb980fa99ec8c8fa2ce" + +[[package]] +name = "version_check" +version = "0.9.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" + +[[package]] +name = "zeroize" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 000000000..332878d19 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "umu" +version = "0.1.0" +edition = "2021" +rust-version = "1.83" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[lib] +name = "umu_delta" +crate-type = ["cdylib"] + +[dependencies] +pyo3 = { version = "0.23.3", features = ["extension-module"] } +ssh-key = { version = "0.6.7", default-features = false, features = [ + "ed25519", + "alloc", +] } +sha2 = "0.10.8" +base16ct = { version = "0.2.0", features = ["alloc"] } diff --git a/Makefile.in b/Makefile.in index 9324abc29..03b18fc4b 100644 --- a/Makefile.in +++ b/Makefile.in @@ -66,9 +66,9 @@ umu-dist-install: umu-dist $(PYTHON_INTERPRETER) -m installer --destdir=$(DESTDIR) $(OBJDIR)/*.whl ifeq ($(FLATPAK), xtrue) -umu-install: umu-dist-install +umu-install: umu-dist-install umu-delta-install else -umu-install: umu-dist-install umu-docs-install +umu-install: umu-dist-install umu-delta-install umu-docs-install endif ifeq ($(FLATPAK), xtrue) @@ -102,6 +102,7 @@ umu-launcher-install: umu-launcher-dist-install umu-launcher-bin-install $(OBJDIR)/.build-umu-vendored: | $(OBJDIR) $(info :: Building vendored dependencies ) python3 -m pip install urllib3 -t $(OBJDIR) + python3 -m pip install pyzstd --config-settings="--build-option=--dynamic-link-zstd" -t $(OBJDIR) .PHONY: umu-vendored umu-vendored: $(OBJDIR)/.build-umu-vendored @@ -110,6 +111,7 @@ umu-vendored-install: umu-vendored $(info :: Installing subprojects ) install -d $(DESTDIR)$(PYTHONDIR)/umu/_vendor cp -r $(OBJDIR)/urllib3 $(DESTDIR)$(PYTHONDIR)/umu/_vendor + cp -r $(OBJDIR)/pyzstd $(DESTDIR)$(PYTHONDIR)/umu/_vendor $(OBJDIR): @mkdir -p $(@) @@ -167,4 +169,18 @@ zipapp-install: zipapp install -Dm755 -p $(ZIPAPP) $(DESTDIR)$(BINDIR) @echo "Standalone application 'umu-run' created at '$(DESTDIR)$(PREFIX)/bin'" +PYTHON_PLATFORM_TAG = $(shell $(PYTHON_INTERPRETER) -c 'import sysconfig; print(sysconfig.get_config_var("EXT_SUFFIX"))') + +$(OBJDIR)/.build-umu-delta: | $(OBJDIR) + $(info :: Building delta dependencies ) + cargo build -r + cp -a ./target/release/libumu_delta.so $(OBJDIR)/umu_delta$(PYTHON_PLATFORM_TAG) + +.PHONY: umu-delta +umu-delta: $(OBJDIR)/.build-umu-delta + +umu-delta-install: umu-delta + $(info :: Installing delta dependencies ) + install -Dm755 $(OBJDIR)/umu_delta$(PYTHON_PLATFORM_TAG) $(DESTDIR)$(PYTHONDIR)/$(INSTALLDIR)/umu_delta$(PYTHON_PLATFORM_TAG) + # vim: ft=make diff --git a/README.md b/README.md index e86b15afd..13ae9ddb7 100644 --- a/README.md +++ b/README.md @@ -81,6 +81,8 @@ Borderlands 3 from EGS store. Building umu-launcher currently requires `bash`, `make`, and `scdoc` for distribution, as well as the following Python build tools: [build](https://github.com/pypa/build), [hatchling](https://github.com/pypa/hatch), [installer](https://github.com/pypa/installer), and [pip](https://github.com/pypa/pip). 
+Additionally, [cargo](https://github.com/rust-lang/cargo) is required; the MSRV is determined by the latest stable versions of its direct dependencies. + To build umu-launcher, after downloading and extracting the source code from this repository, change into the newly extracted directory ```shell cd umu-launcher @@ -159,6 +161,7 @@ let umu = inputs.umu.packages.${system}.umu.override { version = inputs.umu.shortRev; truststore = true; + cbor2 = true; }; in { @@ -166,7 +169,7 @@ in } ``` > [!NOTE] -> truststore is an optional dependency that is enabled by default if you want to keep it that way you can remove the `truststore = true;` part +> truststore and cbor2 (for delta updates) are optional dependencies that are enabled by default; if you want to keep them that way, you can remove the `truststore = true; cbor2 = true;` part > [!NOTE] > The example above relies on having your flake's `inputs` passed through to your nixos configuration. diff --git a/docs/umu.1.scd b/docs/umu.1.scd index 3a151c8a6..25dcc1651 100644 --- a/docs/umu.1.scd +++ b/docs/umu.1.scd @@ -142,10 +142,13 @@ _GAMEID_ Required. Can be an arbitrary value or a valid id in the *umu-database*[4]. _PROTONPATH_ - Optional. Path to a Proton directory, version name (e.g., GE-Proton9-5) or codename (e.g., GE-Proton). + Optional. Path to a Proton directory, version name (e.g., GE-Proton9-5) or + codename (e.g., GE-Proton). Otherwise, defaults to using UMU-Proton. + + Valid codenames include: _GE-Proton_, _UMU-Latest_, and _GE-Latest_. + _WINEPREFIX_ Optional. Path to a WINE prefix directory. Otherwise defaults to _$HOME/Games/umu/$GAMEID_. diff --git a/packaging/deb/0001-deb-fix-build-by-using-rustup.patch b/packaging/deb/0001-deb-fix-build-by-using-rustup.patch new file mode 100644 index 000000000..b4cf25b87 --- /dev/null +++ b/packaging/deb/0001-deb-fix-build-by-using-rustup.patch @@ -0,0 +1,69 @@ +From 01a6b1c6e545fab6d01d24dfe1e240d327e77c3d Mon Sep 17 00:00:00 2001 +From: R1kaB3rN <100738684+R1kaB3rN@users.noreply.github.com> +Date: Fri, 3 Jan 2025 22:58:17 -0800 +Subject: [PATCH] deb: fix build by using rustup + +--- + Makefile.in | 10 +++++++++- + packaging/deb/debian/control | 1 + + packaging/deb/ubuntu/control | 1 + + 3 files changed, 11 insertions(+), 1 deletion(-) + +diff --git a/Makefile.in b/Makefile.in +index 03b18fc4..b9cfeb10 100644 +--- a/Makefile.in ++++ b/Makefile.in +@@ -3,6 +3,8 @@ PROJECT := umu-launcher + # Define the interpreters to use to prevent shebang complaints + PYTHON_INTERPRETER = /usr/bin/env python3 + ++SHELL_INTERPRETER = /usr/bin/env sh ++ + # If this is changed to umu (uppercase), `uninstall` target will also remove the SLR directory + INSTALLDIR ?= umu + +@@ -170,10 +172,16 @@ zipapp-install: zipapp + @echo "Standalone application 'umu-run' created at '$(DESTDIR)$(PREFIX)/bin'" + + PYTHON_PLATFORM_TAG = $(shell $(PYTHON_INTERPRETER) -c 'import sysconfig; print(sysconfig.get_config_var("EXT_SUFFIX"))') ++CARGO_BIN := $(HOME)/.cargo/bin/cargo ++RUSTUP_BIN := $(HOME)/.cargo/bin/rustup + + $(OBJDIR)/.build-umu-delta: | $(OBJDIR) + $(info :: Building delta dependencies ) +- cargo build -r ++ curl -LJO https://sh.rustup.rs ++ chmod u+x ./rustup-init.sh ++ $(SHELL_INTERPRETER) rustup-init.sh --default-toolchain none -y ++ $(RUSTUP_BIN) toolchain install 1.83 ++ $(CARGO_BIN) build -r + cp -a ./target/release/libumu_delta.so $(OBJDIR)/umu_delta$(PYTHON_PLATFORM_TAG) + + .PHONY: umu-delta +diff --git a/packaging/deb/debian/control b/packaging/deb/debian/control +index 243d1771..5372875d 100644 +--- 
a/packaging/deb/debian/control ++++ b/packaging/deb/debian/control +@@ -14,6 +14,7 @@ Build-Depends: + python3-installer, + python3-build, + python3-pip, ++ curl, + Standards-Version: 4.6.2 + Homepage: https://github.com/Open-Wine-Components/umu-launcher + Vcs-Browser: https://github.com/Open-Wine-Components/umu-launcher +diff --git a/packaging/deb/ubuntu/control b/packaging/deb/ubuntu/control +index 243d1771..5372875d 100644 +--- a/packaging/deb/ubuntu/control ++++ b/packaging/deb/ubuntu/control +@@ -14,6 +14,7 @@ Build-Depends: + python3-installer, + python3-build, + python3-pip, ++ curl, + Standards-Version: 4.6.2 + Homepage: https://github.com/Open-Wine-Components/umu-launcher + Vcs-Browser: https://github.com/Open-Wine-Components/umu-launcher +-- +2.47.1 + diff --git a/packaging/deb/debian/control b/packaging/deb/debian/control index 98ab7a4a3..243d17712 100644 --- a/packaging/deb/debian/control +++ b/packaging/deb/debian/control @@ -25,7 +25,7 @@ Architecture: all Depends: python3-umu-launcher (= ${binary:Version}), ${misc:Depends}, -Recommends: fontconfig, fonts-liberation, libasound2-plugins, libegl1, libexpat1, libfontconfig1, libgbm1, libnm0, libsdl2-2.0-0, libusb-1.0-0, libva-drm2, libva-glx2, libx11-6, libx11-xcb1, libxau6, libxcb-dri2-0, libxcb-glx0, libxcb-present0, libxcb-sync1, libxdamage1, libxdmcp6, libxext6, libxfixes3, libxss1, libxxf86vm1, mesa-vulkan-drivers, steam-devices, va-driver-all | va-driver, xdg-desktop-portal, xdg-desktop-portal-gtk | xdg-desktop-portal-backend, xdg-utils, xterm | x-terminal-emulator, zenity +Recommends: fontconfig, fonts-liberation, libasound2-plugins, libegl1, libexpat1, libfontconfig1, libgbm1, libnm0, libsdl2-2.0-0, libusb-1.0-0, libva-drm2, libva-glx2, libx11-6, libx11-xcb1, libxau6, libxcb-dri2-0, libxcb-glx0, libxcb-present0, libxcb-sync1, libxdamage1, libxdmcp6, libxext6, libxfixes3, libxss1, libxxf86vm1, mesa-vulkan-drivers, steam-devices, va-driver-all | va-driver, xdg-desktop-portal, xdg-desktop-portal-gtk | xdg-desktop-portal-backend, xdg-utils, xterm | x-terminal-emulator, zenity | libzstd, python3-xxhash, python3-cbor2 Suggests: libudev0, nvidia-driver-libs, nvidia-vulkan-icd, pipewire Description: A tool for launching non-steam games with proton. 
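Note on the new Recommends above: python3-cbor2, python3-xxhash, and libzstd can stay optional because the delta-update code only imports cbor2, xxhash, and pyzstd when they are present (see the `suppress(ModuleNotFoundError)` guard in umu/umu_bspatch.py and the `find_spec` check in umu/umu_proton.py later in this diff). A minimal sketch of such a probe, assuming the module names from requirements.in; the `delta_updates_available` helper is illustrative and not part of this patch:

```python
# Sketch only: probe for the optional delta-update stack before attempting a
# partial update, so missing packages simply mean a full archive download.
from importlib.util import find_spec


def delta_updates_available() -> bool:
    # cbor2 decodes the patch container, xxhash verifies file digests,
    # pyzstd decompresses the embedded binary deltas.
    return all(find_spec(mod) is not None for mod in ("cbor2", "xxhash", "pyzstd"))


if not delta_updates_available():
    # Fall back to downloading the full Proton archive.
    print("delta updates unavailable, using full archive")
```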
diff --git a/packaging/deb/debian/rules b/packaging/deb/debian/rules index a13178230..a3241c929 100755 --- a/packaging/deb/debian/rules +++ b/packaging/deb/debian/rules @@ -27,6 +27,7 @@ PYTHONDIR = /usr/lib/python3/dist-packages dh $@ override_dh_auto_configure: + patch -p1 < packaging/deb/0001-deb-fix-build-by-using-rustup.patch ./configure.sh --prefix=/usr override_dh_auto_build: diff --git a/packaging/deb/ubuntu/control b/packaging/deb/ubuntu/control index 98ab7a4a3..243d17712 100644 --- a/packaging/deb/ubuntu/control +++ b/packaging/deb/ubuntu/control @@ -25,7 +25,7 @@ Architecture: all Depends: python3-umu-launcher (= ${binary:Version}), ${misc:Depends}, -Recommends: fontconfig, fonts-liberation, libasound2-plugins, libegl1, libexpat1, libfontconfig1, libgbm1, libnm0, libsdl2-2.0-0, libusb-1.0-0, libva-drm2, libva-glx2, libx11-6, libx11-xcb1, libxau6, libxcb-dri2-0, libxcb-glx0, libxcb-present0, libxcb-sync1, libxdamage1, libxdmcp6, libxext6, libxfixes3, libxss1, libxxf86vm1, mesa-vulkan-drivers, steam-devices, va-driver-all | va-driver, xdg-desktop-portal, xdg-desktop-portal-gtk | xdg-desktop-portal-backend, xdg-utils, xterm | x-terminal-emulator, zenity +Recommends: fontconfig, fonts-liberation, libasound2-plugins, libegl1, libexpat1, libfontconfig1, libgbm1, libnm0, libsdl2-2.0-0, libusb-1.0-0, libva-drm2, libva-glx2, libx11-6, libx11-xcb1, libxau6, libxcb-dri2-0, libxcb-glx0, libxcb-present0, libxcb-sync1, libxdamage1, libxdmcp6, libxext6, libxfixes3, libxss1, libxxf86vm1, mesa-vulkan-drivers, steam-devices, va-driver-all | va-driver, xdg-desktop-portal, xdg-desktop-portal-gtk | xdg-desktop-portal-backend, xdg-utils, xterm | x-terminal-emulator, zenity | libzstd, python3-xxhash, python3-cbor2 Suggests: libudev0, nvidia-driver-libs, nvidia-vulkan-icd, pipewire Description: A tool for launching non-steam games with proton. 
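For context on how the delta path consumes these optional packages: umu/umu_proton.py (later in this diff) decodes the CBOR patch container and authenticates it with the new `umu_delta` Rust extension before any files are touched. A rough sketch of that flow, assuming the names exported by src/lib.rs; the `authenticate_patch` wrapper is illustrative only:

```python
# Sketch only: authenticate a delta patch container before applying it.
from cbor2 import dumps, loads

from umu.umu_delta import valid_key, valid_signature  # compiled Rust extension


def authenticate_patch(patch: bytes) -> dict | None:
    container = loads(patch)
    public_key, _ = container["public_key"]
    signature, _ = container["signature"]
    # The embedded key must hash to a whitelisted digest, and the Ed25519
    # signature must cover the canonical CBOR encoding of 'contents'.
    if not valid_key(public_key):
        return None
    if not valid_signature(
        public_key, dumps(container["contents"], canonical=True), signature
    ):
        return None
    return container
```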
diff --git a/packaging/deb/ubuntu/rules b/packaging/deb/ubuntu/rules index a13178230..a3241c929 100755 --- a/packaging/deb/ubuntu/rules +++ b/packaging/deb/ubuntu/rules @@ -27,6 +27,7 @@ PYTHONDIR = /usr/lib/python3/dist-packages dh $@ override_dh_auto_configure: + patch -p1 < packaging/deb/0001-deb-fix-build-by-using-rustup.patch ./configure.sh --prefix=/usr override_dh_auto_build: diff --git a/packaging/nix/combine.nix b/packaging/nix/combine.nix index 1f3e14293..958071005 100644 --- a/packaging/nix/combine.nix +++ b/packaging/nix/combine.nix @@ -1,8 +1,8 @@ -{ env, package, symlinkJoin,version,truststore }: +{ env, package, symlinkJoin,version,truststore,cbor2 }: symlinkJoin { name = "umu-run-bwrap"; paths = [ - (package.override {version = "${version}";truststore = truststore;}) + (package.override {version = "${version}";truststore = truststore;cbor2 = cbor2;}) (env.override {version = "${version}";}) ]; postBuild = '' diff --git a/packaging/nix/flake.nix b/packaging/nix/flake.nix index 9e38194cd..8b25a6b29 100644 --- a/packaging/nix/flake.nix +++ b/packaging/nix/flake.nix @@ -21,6 +21,6 @@ umu-launcher = nixpk.callPackage ./umu-launcher.nix { umu-launcher=umu-launcher-src; pyth1=pyth; version = "${version}"; }; umu-run = nixpk.callPackage ./umu-run.nix { package=umu-launcher; version = "${version}"; }; in{ - packages.x86_64-linux.umu = nixpk.callPackage ./combine.nix { env=umu-run; package=umu-launcher; version = "${version}"; truststore = true; }; + packages.x86_64-linux.umu = nixpk.callPackage ./combine.nix { env=umu-run; package=umu-launcher; version = "${version}"; truststore = true; cbor2 = true; }; }; } diff --git a/packaging/nix/umu-launcher.nix b/packaging/nix/umu-launcher.nix index da00e7878..008f35319 100644 --- a/packaging/nix/umu-launcher.nix +++ b/packaging/nix/umu-launcher.nix @@ -1,4 +1,4 @@ -{lib, pyth1 ,python3Packages , umu-launcher, pkgs,version, truststore ? true, ...}: +{lib, pyth1 ,python3Packages , umu-launcher, pkgs,version, truststore ? true, deltaUpdates ? 
{ cbor2 = true; xxhash = true; zstd = true; }, rustPlatform, ...}: python3Packages.buildPythonPackage { pname = "umu-launcher"; version = "${version}"; @@ -13,13 +13,21 @@ python3Packages.buildPythonPackage { pkgs.python3Packages.installer pkgs.hatch pkgs.python3Packages.build + pkgs.cargo ]; + cargoDeps = rustPlatform.importCargoLock { + lockFile = ../../Cargo.lock; + }; + nativeBuildInputs = with rustPlatform; [ cargoSetupHook ]; propagatedBuildInputs = [ pyth1 pkgs.bubblewrap pkgs.python3Packages.xlib pkgs.python3Packages.urllib3 - ] ++ lib.optional truststore pkgs.python3Packages.truststore; + ] ++ lib.optional truststore pkgs.python3Packages.truststore + ++ lib.optional deltaUpdates.cbor2 pkgs.python3Packages.cbor2 + ++ lib.optional deltaUpdates.xxhash pkgs.python3Packages.xxhash + ++ lib.optional deltaUpdates.zstd pkgs.zstd; makeFlags = [ "PYTHON_INTERPRETER=${pyth1}/bin/python" "SHELL_INTERPRETER=/run/current-system/sw/bin/bash" "DESTDIR=${placeholder "out"}" ]; dontUseMesonConfigure = true; dontUseNinjaBuild = true; diff --git a/packaging/rpm/umu-launcher.spec b/packaging/rpm/umu-launcher.spec index f9879c1b2..fed77820d 100644 --- a/packaging/rpm/umu-launcher.spec +++ b/packaging/rpm/umu-launcher.spec @@ -40,6 +40,10 @@ Requires: python Requires: python3 Requires: python3-xlib +Recommends: python3-cbor2 +Recommends: python3-xxhash +Recommends: libzstd + %description %{name} A tool for launching non-steam games with proton diff --git a/packaging/snap/snap/snapcraft.yaml b/packaging/snap/snap/snapcraft.yaml index 1bafd0b8c..4a8b87d2e 100644 --- a/packaging/snap/snap/snapcraft.yaml +++ b/packaging/snap/snap/snapcraft.yaml @@ -246,6 +246,9 @@ parts: plugin: nil stage-packages: - python3-xlib + - python3-xxhash + - python3-cbor2 + - libzstd prime: - usr/lib/python3 diff --git a/pyproject.toml b/pyproject.toml index 613632e44..68fdf94bb 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,10 +41,15 @@ dependencies = ["python-xlib>=0.33", "urllib3>=2.0.0,<3.0.0"] # Recommended # For network requests, use the system's CA bundle instead of certifi's cli = ["truststore"] +# Support for delta updates for Proton and the Steam Linux Runtime +delta-updates = ["cbor2>=5.4.6,<6.0.0", "xxhash>=3.2.0,<4.0.0", "pyzstd>=0.16.2,<1.0.0"] [project.scripts] umu-run = "umu.__main__:main" +[dependency-groups] +dev = ["pip", "hatch", "installer", "build", "patchelf", "maturin"] + [tool.hatch.build.targets.sdist] exclude = [ "/.github", diff --git a/requirements.in b/requirements.in index edf15f17a..32fe12913 100644 --- a/requirements.in +++ b/requirements.in @@ -1,2 +1,5 @@ python-xlib>=0.33 urllib3>=2.0.0,<3.0.0 +xxhash>=3.2.0 +pyzstd>=0.16.2 +cbor2>=5.4.6 diff --git a/src/lib.rs b/src/lib.rs new file mode 100644 index 000000000..71f74baff --- /dev/null +++ b/src/lib.rs @@ -0,0 +1,45 @@ +use base16ct::lower::encode_string; +use pyo3::prelude::*; +use sha2::{Digest, Sha512}; +use ssh_key::{PublicKey, SshSig}; + +/// Required parameter to create/verify digital signatures +/// See https://cvsweb.openbsd.org/src/usr.bin/ssh/PROTOCOL.sshsig?annotate=HEAD +const NAMESPACE: &str = "umu.openwinecomponents.org"; + +/// Whitelist of valid OpenSSH formatted, Ed25519 public keys +/// Used for delta updates to create the root of trust +const PUBLIC_KEYS: [&str; 1] = ["5b0b4cd1dad99cd013d5a88cf27d6c7414db33ece7f3146f96fb0f62c64ec15317a22f3f05048ac29177be9d95c47856e01b6e2a3dc61dd8202df4156465899c"]; + +#[pyfunction] +fn valid_key(source: &str) -> bool { + let hash = Sha512::digest(source.as_bytes()); + let hash_hex = 
&encode_string(&hash); + PUBLIC_KEYS.contains(&hash_hex.as_str()) +} + +#[pyfunction] +fn valid_signature(source: &str, message: &[u8], pem: &[u8]) -> bool { + let public_key = match PublicKey::from_openssh(source) { + Ok(ret) => ret, + Err(e) => { + eprintln!("{}", e); + return false; + } + }; + let ssh_sig = match SshSig::from_pem(pem) { + Ok(ret) => ret, + Err(e) => { + eprintln!("{}", e); + return false; + } + }; + public_key.verify(NAMESPACE, message, &ssh_sig).is_ok() +} + +#[pymodule(name = "umu_delta")] +fn umu(m: &Bound<'_, PyModule>) -> PyResult<()> { + m.add_function(wrap_pyfunction!(valid_signature, m)?)?; + m.add_function(wrap_pyfunction!(valid_key, m)?)?; + Ok(()) +} diff --git a/umu/umu_bspatch.py b/umu/umu_bspatch.py new file mode 100644 index 000000000..6f062217f --- /dev/null +++ b/umu/umu_bspatch.py @@ -0,0 +1,357 @@ +import os +from concurrent.futures import Future, ThreadPoolExecutor +from contextlib import suppress +from enum import Enum +from mmap import ACCESS_READ, ACCESS_WRITE, MADV_DONTNEED, mmap +from pathlib import Path +from shutil import rmtree +from typing import TypedDict + +from umu.umu_log import log +from umu.umu_util import memfdfile + +with suppress(ModuleNotFoundError): + from pyzstd import DParameter, ZstdDict, decompress + from xxhash import xxh3_64_intdigest + + +class FileType(Enum): + """Represents a file type.""" + + # File types currently supported by mtree(1) + File = "file" + Block = "block" + Char = "char" + Dir = "dir" + Fifo = "fifo" + Link = "link" + Socket = "socket" + + +class Entry(TypedDict): + """Represents an entry within a patch section of a patch file.""" + + # Binary delta data, compressed data or symbolic link's target + data: bytes + # File mode bits as decimal + mode: int + # File's name as a relative path with the base name omitted + # e.g., protonfixes/gamefixes-umu/umu-zenlesszonezero.py + name: str + # File's type + type: FileType + # xxhash result after applying the binary patch + xxhash: int + # File's modification time + time: float + # File's size + size: int + + +class ManifestEntry(TypedDict): + """Represents an entry within a manifest section of a patch file.""" + + # File mode bits as decimal + mode: int + # File's name as a relative path with the base name omitted + # e.g., protonfixes/gamefixes-umu/umu-zenlesszonezero.py + name: str + # xxhash result + xxhash: int + # File size + size: int + # File modification time + time: float + + +class Content(TypedDict): + """Represent a child of the root section, containing patch sections of a patch file.""" + + manifest: list[ManifestEntry] + # List of binaries to add in target directory + add: list[Entry] + # List of binaries to update in target directory + update: list[Entry] + # List of binaries to delete in target directory + delete: list[Entry] + source: str + target: str + + +class ContentContainer(TypedDict): + """Represent the root section of a patch file.""" + + contents: list[Content] + # Ed25519 digital signature of 'contents' + signature: tuple[bytes, bytes] + # Ed25519 SSH public key + public_key: tuple[bytes, bytes] + + +MMAP_MIN = 16 * 1024 + +ZSTD_WINDOW_LOG_MIN = 10 + + +class CustomPatcher: + """Class for updating the contents within a compatibility tool directory. + + Intended to update supported tools like Proton and the Steam Linux Runtime within + $XDG_DATA_HOME/umu. 
+ + Given a patch file and two directories, 'a' and 'b', that have similar structure + and where 'a' is already present on the system, will update all the contents within + 'a' to recreate 'b'. The patch file format will drive behavior and will contain all + the necessary data and metadata to create 'b'. + """ + + def __init__( # noqa: D107 + self, + content: Content, + compat_tool: Path, + thread_pool: ThreadPoolExecutor, + ) -> None: + self._arc_contents: Content = content + self._arc_manifest: list[ManifestEntry] = self._arc_contents["manifest"] + self._compat_tool = compat_tool + self._thread_pool = thread_pool + self._futures: list[Future] = [] + + def add_binaries(self) -> None: + """Add binaries within a compatibility tool. + + Handles the case where the subdirectory contents in 'b' are not in 'a'. + + Will only operate on files, links and directories. Files will be recreated by + decompressing the data in the patch item. Links will be symlinked to their targets + and directories will be created. + """ + # Create new files, if there are any items + for item in self._arc_contents["add"]: + build_file: Path = self._compat_tool.joinpath(item["name"]) + if item["type"] == FileType.File.value: + # Decompress the zstd data and write the file + self._futures.append( + self._thread_pool.submit(self._write_proton_file, build_file, item) + ) + continue + if item["type"] == FileType.Link.value: + build_file.symlink_to(item["data"]) + continue + if item["type"] == FileType.Dir.value: + build_file.mkdir(mode=item["mode"], exist_ok=True, parents=True) + continue + log.warning( + "Found file '%s' with type '%s', skipping its inclusion", + item["name"], + item["type"], + ) + + def update_binaries(self) -> None: + """Update binaries within a compatibility tool. + + Handles the case where the subdirectory contents between 'a' and 'b' differ, + where 'b' is the new version. + + Will apply a binary patch for files that need to be updated. Directories will + have their permissions changed. Links will be replaced. + """ + for item in self._arc_contents["update"]: + build_file: Path = self._compat_tool.joinpath(item["name"]) + if item["type"] == FileType.File.value: + # For files, apply a binary patch + self._futures.append( + self._thread_pool.submit(self._patch_proton_file, build_file, item) + ) + continue + if item["type"] == FileType.Dir.value: + # For directories, change permissions + os.chmod(build_file, item["mode"], follow_symlinks=False) # noqa: PTH101 + continue + if item["type"] == FileType.Link.value: + # For links, replace the links + build_file.unlink() + build_file.symlink_to(item["data"]) + continue + log.warning( + "Found file '%s' with type '%s', skipping its update", + item["name"], + item["type"], + ) + + def delete_binaries(self) -> None: + """Delete obsolete binaries within a compatibility tool. + + Handles the case where the subdirectory contents of 'a' are not in 'b', + where 'b' is the new version. + + Will only operate on links, normal files, and directories while skipping + everything else. 
+ """ + for item in self._arc_contents["delete"]: + if ( + item["type"] == FileType.File.value + or item["type"] == FileType.Link.value + ): + self._compat_tool.joinpath(item["name"]).unlink(missing_ok=True) + continue + if item["type"] == FileType.Dir.value: + self._thread_pool.submit( + rmtree, str(self._compat_tool.joinpath(item["name"])) + ) + continue + log.warning( + "Found file '%s' with type '%s', skipping its update", + item["name"], + item["type"], + ) + + def verify_integrity(self) -> None: + """Verify the expected mode, size, file and digest of the compatibility tool.""" + for item in self._arc_manifest: + self._futures.append( + self._thread_pool.submit(self._check_binaries, self._compat_tool, item) + ) + + def result(self) -> list[Future]: + """Return the currently submitted tasks.""" + return self._futures + + def _check_binaries( + self, proton: Path, item: ManifestEntry + ) -> ManifestEntry | None: + rpath: Path = proton.joinpath(item["name"]) + + try: + with rpath.open("rb") as fp: + stats: os.stat_result = os.fstat(fp.fileno()) + xxhash: int = 0 + if item["size"] != stats.st_size: + log.error( + "Expected size %s, received %s", item["size"], stats.st_size + ) + return None + if item["mode"] != stats.st_mode: + log.error( + "Expected mode %s, received %s", item["mode"], stats.st_mode + ) + return None + if stats.st_size > MMAP_MIN: + with mmap(fp.fileno(), length=0, access=ACCESS_READ) as mm: + # Ignore. Passing an mmap is valid here + # See https://docs.python.org/3/library/mmap.html#module-mmap + xxhash = xxh3_64_intdigest(mm) # type: ignore + mm.madvise(MADV_DONTNEED, 0, stats.st_size) + else: + xxhash = xxh3_64_intdigest(fp.read()) + if item["xxhash"] != xxhash: + log.error("Expected xxhash %s, received %s", item["xxhash"], xxhash) + return None + except FileNotFoundError: + log.debug("Aborting partial update, file not found: %s", rpath) + return None + + return item + + def _patch_proton_file(self, path: Path, item: Entry) -> None: + bdiff: bytes = item["data"] + digest: int = item["xxhash"] + mode: int = item["mode"] + size: int = item["size"] + + try: + # Since some wine binaries are missing the writable bit and + # we're memory mapping files. Before applying a binary patch, + # ensure the file is writable + os.chmod(path, 0o700, follow_symlinks=False) # noqa: PTH101 + + # With our patch file, apply the delta in place + with path.open("rb+") as fp: + stats: os.stat_result = os.stat(fp.fileno()) # noqa: PTH116 + xxhash: int = 0 + + # If less than the window log, write the data + # The patcher inserts the raw, decompressed data in this case + if max(stats.st_size, size).bit_length() < ZSTD_WINDOW_LOG_MIN: + fp.write(bdiff) + fp.truncate(size) + os.lseek(fp.fileno(), 0, os.SEEK_SET) + + xxhash = xxh3_64_intdigest(fp.read()) + if xxhash != digest: + err: str = ( + f"Expected xxhash {digest}, received {xxhash} for file " + f"'{path}' truncating from size {stats.st_size} -> {size}" + ) + raise ValueError(err) + + os.fchmod(fp.fileno(), mode) + return + + # Apply our patch to the file in-place + with mmap(fp.fileno(), length=0, access=ACCESS_WRITE) as mm: + # Prepare the zst dictionary and opt + zst_dict = ZstdDict(mm, is_raw=True) + zst_opt = {DParameter.windowLogMax: 31} + + # If file will become large, increase + if stats.st_size < size: + mm.resize(size) + + # Patch the region + mm[:size] = decompress( + bdiff, zstd_dict=zst_dict.as_prefix, option=zst_opt + ) + + # If file will become small, decrease + if size < stats.st_size: + mm.resize(size) + + # Ignore. 
Passing an mmap is valid + xxhash = xxh3_64_intdigest(mm) # type: ignore + + if xxhash != digest: + err: str = ( + f"Expected xxhash {digest}, received {xxhash} for " + f"file '{path}' truncating from size {stats.st_size} -> {size}" + ) + raise ValueError(err) + + mm.madvise(MADV_DONTNEED, 0, size) + + # Update the file's metadata + os.fchmod(fp.fileno(), mode) + except BaseException as e: + log.exception(e) + log.warning("File '%s' may be corrupt and has mode bits 0o700", path) + raise + + def _write_proton_file(self, path: Path, item: Entry) -> None: + data: bytes = item["data"] + digest: int = item["xxhash"] + mode: int = item["mode"] + size: int = item["size"] + + with memfdfile(path.name) as fp: + xxhash: int = 0 + + fp.truncate(size) + + # Decompress our data and write to our file + with mmap(fp.fileno(), length=0, access=ACCESS_WRITE) as mm: + mm[:] = decompress(data) + # Ignore. Passing an mmap is valid + xxhash = xxh3_64_intdigest(mm) # type: ignore + + if xxhash != digest: + err: str = ( + f"Expected xxhash {digest}, received {xxhash} for fd " + f"{fp.fileno()} from source {path}" + ) + raise ValueError(err) + + with path.open("wb") as file: + os.sendfile(file.fileno(), fp.fileno(), 0, size) + os.fchmod(file.fileno(), mode) + + mm.madvise(MADV_DONTNEED, 0, size) diff --git a/umu/umu_consts.py b/umu/umu_consts.py index c9b5f80f1..14d95978e 100644 --- a/umu/umu_consts.py +++ b/umu/umu_consts.py @@ -87,6 +87,9 @@ class GamescopeAtom(Enum): # Temporary directory for downloaded resources moved from tmpfs UMU_CACHE: Path = XDG_CACHE_HOME.joinpath("umu") +# Directory storing Proton and other compatibility tools built against the SLR +UMU_COMPAT: Path = XDG_DATA_HOME.joinpath("umu", "compatibilitytools") + # Constant defined in prctl.h # See prctl(2) for more details PR_SET_CHILD_SUBREAPER = 36 diff --git a/umu/umu_proton.py b/umu/umu_proton.py index d604d666c..147260aa8 100644 --- a/umu/umu_proton.py +++ b/umu/umu_proton.py @@ -1,10 +1,13 @@ import os -from concurrent.futures import Future, ThreadPoolExecutor +import time +from concurrent.futures import ThreadPoolExecutor +from enum import Enum from hashlib import sha512 from http import HTTPStatus +from importlib.util import find_spec from pathlib import Path from re import split as resplit -from shutil import move, rmtree +from shutil import move from tempfile import TemporaryDirectory from typing import Any @@ -13,7 +16,8 @@ from urllib3.poolmanager import PoolManager from urllib3.response import BaseHTTPResponse -from umu.umu_consts import STEAM_COMPAT, UMU_CACHE, UMU_LOCAL, HTTPMethod +from umu.umu_bspatch import Content, ContentContainer, CustomPatcher +from umu.umu_consts import STEAM_COMPAT, UMU_CACHE, UMU_COMPAT, UMU_LOCAL, HTTPMethod from umu.umu_log import log from umu.umu_util import ( extract_tarfile, @@ -34,6 +38,15 @@ SessionCaches = tuple[CacheTmpfs, CacheSubdir] +class ProtonVersion(Enum): + """Represent valid version keywords for Proton.""" + + GE = "GE-Proton" + UMU = "UMU-Proton" + GELatest = "GE-Latest" + UMULatest = "UMU-Latest" + + def get_umu_proton(env: dict[str, str], session_pools: SessionPools) -> dict[str, str]: """Attempt to use the latest Proton when configured. @@ -50,23 +63,31 @@ def get_umu_proton(env: dict[str, str], session_pools: SessionPools) -> dict[str # First element is the digest asset, second is the Proton asset. Each asset # will contain the asset's name and the URL that hosts it. 
assets: tuple[tuple[str, str], tuple[str, str]] | tuple[()] = () + patch: bytes = b"" + STEAM_COMPAT.mkdir(exist_ok=True, parents=True) UMU_CACHE.mkdir(parents=True, exist_ok=True) try: log.debug("Sending request to 'api.github.com'...") assets = _fetch_releases(session_pools) + # TODO: Refactor this function later. It's basically the same as _fetch_releases + patch = _fetch_patch(session_pools) except HTTPError: log.debug("Network is unreachable") - with ( - TemporaryDirectory() as tmp, - TemporaryDirectory(dir=UMU_CACHE) as tmpcache, - ): + with TemporaryDirectory() as tmp, TemporaryDirectory(dir=UMU_CACHE) as tmpcache: tmpdirs: SessionCaches = (Path(tmp), Path(tmpcache)) - if _get_latest(env, STEAM_COMPAT, tmpdirs, assets, session_pools) is env: + compatdirs = (UMU_COMPAT, STEAM_COMPAT) + if _get_delta(env, UMU_COMPAT, patch, assets, session_pools) is env: + log.info("%s is up to date", os.environ["PROTONPATH"]) + os.environ["PROTONPATH"] = str( + UMU_COMPAT.joinpath(os.environ["PROTONPATH"]) + ) + return env + if _get_latest(env, compatdirs, tmpdirs, assets, session_pools) is env: return env - if _get_from_steamcompat(env, STEAM_COMPAT) is env: + if _get_from_compat(env, compatdirs) is env: return env os.environ["PROTONPATH"] = "" @@ -74,6 +95,50 @@ def get_umu_proton(env: dict[str, str], session_pools: SessionPools) -> dict[str return env +def _fetch_patch(session_pools: SessionPools) -> bytes: + resp: BaseHTTPResponse + _, http_pool = session_pools + url: str = "https://api.github.com" + repo: str = "/repos/Open-Wine-Components/umu-mkpatch/releases" + headers: dict[str, str] = { + "Accept": "application/vnd.github+json", + "X-GitHub-Api-Version": "2022-11-28", + "User-Agent": "", + } + durl: str = "" + + if not find_spec("cbor2") and not find_spec("xxhash"): + return b"" + + resp = http_pool.request(HTTPMethod.GET.value, f"{url}{repo}", headers=headers) + if resp.status != HTTPStatus.OK: + return b"" + + releases = resp.json() or [] + for release in releases: + for asset in release.get("assets", []): + if not asset["name"].endswith("cbor"): + continue + if asset["name"].startswith(os.environ["PROTONPATH"]): + durl = asset["browser_download_url"] + log.info("URL: %s", durl) + break + if asset["name"].startswith(os.environ["PROTONPATH"]): + durl = asset["browser_download_url"] + log.info("URL: %s", durl) + break + + if not durl: + return b"" + + resp = http_pool.request(HTTPMethod.GET.value, durl, headers=headers) + if resp.status != HTTPStatus.OK: + return b"" + + # Typing. False negative in mypy and urllib3's (v2) API guarantees the type + return resp.data # type: ignore + + def _fetch_releases( session_pools: SessionPools, ) -> tuple[tuple[str, str], tuple[str, str]] | tuple[()]: @@ -92,7 +157,10 @@ def _fetch_releases( "User-Agent": "", } - if os.environ.get("PROTONPATH") == "GE-Proton": + if os.environ.get("PROTONPATH") in { + ProtonVersion.GE.value, + ProtonVersion.GELatest.value, + }: repo = "/repos/GloriousEggroll/proton-ge-custom/releases/latest" resp = http_pool.request(HTTPMethod.GET.value, f"{url}{repo}", headers=headers) @@ -255,44 +323,42 @@ def _fetch_proton( return env -def _get_from_steamcompat( - env: dict[str, str], steam_compat: Path +def _get_from_compat( + env: dict[str, str], compats: tuple[Path, Path] ) -> dict[str, str] | None: - """Refer to Steam's compatibilitytools.d folder for any existing Protons. + """Refer to any 'compatibilitytools' folders for any existing Protons. 
When an error occurs in the process of using the latest Proton build either from a digest mismatch, request failure or unreachable network, the latest existing Proton build of that same version will be used """ - version: str = ( - "GE-Proton" if os.environ.get("PROTONPATH") == "GE-Proton" else "UMU-Proton" - ) - - try: - latest: Path = max( - ( - proton - for proton in steam_compat.glob("*") - if proton.name.startswith(version) - ), - key=lambda proton: [ - int(text) if text.isdigit() else text.lower() - for text in resplit(r"(\d+)", proton.name) - ], - ) - log.info("%s found in '%s'", latest.name, steam_compat) - log.info("Using %s", latest.name) - os.environ["PROTONPATH"] = str(latest) - env["PROTONPATH"] = os.environ["PROTONPATH"] - except ValueError: - return None + version: str = os.environ.get("PROTONPATH", ProtonVersion.UMU.value) + + for compat in compats: + try: + latest: Path = max( + filter( + lambda proton: proton.name.startswith(version), compat.glob("*") + ), + key=lambda proton: [ + int(text) if text.isdigit() else text.lower() + for text in resplit(r"(\d+)", proton.name) + ], + ) + log.info("%s found in '%s'", latest.name, compat) + log.info("Using %s", latest.name) + os.environ["PROTONPATH"] = str(latest) + env["PROTONPATH"] = os.environ["PROTONPATH"] + return env + except ValueError: + continue - return env + return None def _get_latest( env: dict[str, str], - steam_compat: Path, + compat_tools: tuple[Path, Path], session_caches: SessionCaches, assets: tuple[tuple[str, str], tuple[str, str]] | tuple[()], session_pools: SessionPools, @@ -307,28 +373,32 @@ def _get_latest( When the digests mismatched or when interrupted, an old build will in $HOME/.local/share/Steam/compatibilitytool.d will be used. """ + umu_compat, steam_compat = compat_tools # Name of the Proton archive (e.g., GE-Proton9-7.tar.gz) tarball: str # Name of the Proton directory (e.g., GE-Proton9-7) proton: str # Name of the Proton version, which is either UMU-Proton or GE-Proton - version: str + version: str = ProtonVersion.UMU.value lockfile: str = f"{UMU_LOCAL}/compatibilitytools.d.lock" + latest_candidates: set[str] if not assets: return None tarball = assets[1][0] proton = tarball.removesuffix(".tar.gz") - version = ( - "GE-Proton" if os.environ.get("PROTONPATH") == "GE-Proton" else "UMU-Proton" - ) + latest_candidates = { + ProtonVersion.GELatest.value, + ProtonVersion.UMULatest.value, + } + + if os.environ.get("PROTONPATH") in {member.value for member in ProtonVersion}: + version = os.environ["PROTONPATH"] # Return if the latest Proton is already installed if steam_compat.joinpath(proton).is_dir(): log.info("%s is up to date", version) - steam_compat.joinpath("UMU-Latest").unlink(missing_ok=True) - steam_compat.joinpath("UMU-Latest").symlink_to(proton) os.environ["PROTONPATH"] = str(steam_compat.joinpath(proton)) env["PROTONPATH"] = os.environ["PROTONPATH"] return env @@ -341,91 +411,55 @@ def _get_latest( if steam_compat.joinpath(proton).is_dir(): raise FileExistsError + if umu_compat.joinpath(version).is_dir(): + raise FileExistsError + # Download the archive to a temporary directory _fetch_proton(env, session_caches, assets, session_pools) # Extract the archive then move the directory - _install_proton( - tarball, session_caches, steam_compat, session_pools - ) - except ( - ValueError, - KeyboardInterrupt, - HTTPError, - ) as e: + _install_proton(tarball, session_caches, compat_tools) + except (ValueError, KeyboardInterrupt, HTTPError) as e: log.exception(e) return None except FileExistsError: # 
Proton was installed by another proc, continue pass - log.debug("Released file lock '%s'", lockfile) - os.environ["PROTONPATH"] = str(steam_compat.joinpath(proton)) + # At this point, Proton is installed + # Now depending on the codename, use a different base path + if version in latest_candidates: + os.environ["PROTONPATH"] = str(umu_compat.joinpath(version)) + log.info("Using %s", version) + else: + os.environ["PROTONPATH"] = str(steam_compat.joinpath(proton)) + log.info("Using %s", proton) + env["PROTONPATH"] = os.environ["PROTONPATH"] - log.debug("Removing: %s", tarball) - log.info("Using %s", proton) return env -def _update_proton( - protons: list[Path], - thread_pool: ThreadPoolExecutor, -) -> None: - """Remove previous stable UMU-Proton builds. - - Assumes that the directories that are named ULWGL/UMU-Proton are ours and - will be removed, so users should not be storing important files there. - """ - futures: list[Future] = [] - log.debug("Updating UMU-Proton") - log.debug("Previous builds: %s", protons) - - if not protons: - return - - for stable in protons: - if stable.is_dir(): - log.debug("Previous stable build found") - log.debug("Removing: %s", stable) - futures.append(thread_pool.submit(rmtree, str(stable))) - - for future in futures: - future.result() - - def _install_proton( tarball: str, session_caches: SessionCaches, - steam_compat: Path, - session_pools: SessionPools, + compat_tools: tuple[Path, Path], ) -> None: """Install a Proton directory to Steam's compatibilitytools.d. An installation is primarily composed of two steps: extract and move. A UMU-Proton or GE-Proton build will first be extracted to a secure temporary directory then moved to compatibilitytools.d, which is expected to be in - $HOME. In the case of UMU-Proton, an installation will include a remove - step, where old builds will be removed in parallel. + $HOME. """ - future: Future | None = None + umu_compat, steam_compat = compat_tools tmpfs, cache = session_caches - thread_pool, _ = session_pools parts: str = f"{tarball}.parts" cached_parts: Path = cache.parent.joinpath(f"{tarball}.parts") - version: str = ( - "GE-Proton" if os.environ.get("PROTONPATH") == "GE-Proton" else "UMU-Proton" - ) - - # TODO: Refactor when differential updates are implemented. 
- # Remove all previous builds when the build is UMU-Proton - if version == "UMU-Proton": - protons: list[Path] = [ - file - for file in steam_compat.glob("*") - if file.name.startswith(("UMU-Proton", "ULWGL-Proton")) - ] - future = thread_pool.submit(_update_proton, protons, thread_pool) + latest_candidates: set[str] = { + ProtonVersion.GELatest.value, + ProtonVersion.UMULatest.value, + } # Move our file and extract within our cache if cached_parts.is_file(): @@ -445,17 +479,167 @@ def _install_proton( log.info("Extracting %s...", tarball) extract_tarfile(cache.joinpath(tarball), cache.joinpath(tarball).parent) - # Move decompressed archive to compatibilitytools.d - log.info( - "%s -> %s", - cache.joinpath(tarball.removesuffix(".tar.gz")), - steam_compat, + # Move decompressed archive to compatibilitytools.d or + # $XDG_DATA_HOME/umu/compatibilitytools + if os.environ.get("PROTONPATH") in latest_candidates: + log.info( + "%s -> %s", cache.joinpath(tarball.removesuffix(".tar.gz")), umu_compat + ) + move( + cache.joinpath(tarball.removesuffix(".tar.gz")), + umu_compat / os.environ["PROTONPATH"], + ) + else: + log.info( + "%s -> %s", cache.joinpath(tarball.removesuffix(".tar.gz")), steam_compat + ) + move(cache.joinpath(tarball.removesuffix(".tar.gz")), steam_compat) + + +def _get_delta( + env: dict[str, str], + umu_compat: Path, + patch: bytes, + assets: tuple[tuple[str, str], tuple[str, str]] | tuple[()], + session_pools: SessionPools, +) -> dict[str, str] | None: + thread_pool, _ = session_pools + version: str = ( + "GE-Latest" if os.environ.get("PROTONPATH") == "GE-Latest" else "UMU-Latest" ) - move(cache.joinpath(tarball.removesuffix(".tar.gz")), steam_compat) + proton: Path = umu_compat.joinpath(version) + lockfile: str = f"{UMU_LOCAL}/compatibilitytools.d.lock" + cbor: ContentContainer + + if not assets: + return None + + if os.environ.get("PROTONPATH") not in { + ProtonVersion.GELatest.value, + ProtonVersion.UMULatest.value, + }: + log.debug("PROTONPATH not *-Latest, skipping") + return None + + if not patch: + log.debug("Received empty byte string for patch, skipping") + return None + + from cbor2 import CBORDecodeError, dumps, loads + + from .umu_delta import valid_key, valid_signature + + try: + cbor = loads(patch) + except CBORDecodeError as e: + log.exception(e) + return None + + log.debug("Acquiring lock '%s'", lockfile) + with unix_flock(lockfile): + tarball, _ = assets[1] + build: str = tarball.removesuffix(".tar.gz") + buildid: Path = umu_compat.joinpath(version, "compatibilitytool.vdf") + + log.info("Build: %s", build) + log.debug("Acquired lock '%s'", lockfile) + + # Check if we're up to date by doing a simple file check + # Avoids the cost of creating threads and memory-mapped IO + try: + with buildid.open(encoding="utf-8") as file: + is_updated: bool = any(filter(lambda line: build in line, file)) # type: ignore + if is_updated: + log.info("%s is up to date", version) + os.environ["PROTONPATH"] = str(umu_compat.joinpath(version)) + env["PROTONPATH"] = os.environ["PROTONPATH"] + return env + except (UnicodeDecodeError, FileNotFoundError): + # Case when the VDF file DNE/or has non-utf-8 chars + log.error( + "Failed opening file '%s', unable to determine latest build", buildid + ) + return None + + # Validate the integrity of the embedded public key. 
Use RustCrypto's SHA2 + # implementation to keep the security boundary consistent + public_key, _ = cbor["public_key"] + if not valid_key(public_key): + # OWC maintainer forgot to add digest to whitelist, a different public key + # was accidentally used or patch was created by a 3rd party + log.error( + "Digest mismatched for public key '%s', skipping", cbor["public_key"] + ) + return None + + # With the public key, verify the signature and data + signature, _ = cbor["signature"] + if not valid_signature( + public_key, dumps(cbor["contents"], canonical=True), signature + ): + log.error("Digital signature verification failed, skipping") + return None + + patchers: list[CustomPatcher | None] = [] + renames: list[tuple[Path, Path]] = [] + + # Apply the patch + for content in cbor["contents"]: + src: str = content["source"] + + if src.startswith((ProtonVersion.GE.value, ProtonVersion.UMU.value)): + patchers.append(_apply_delta(proton, content, thread_pool)) + continue + + subdir: Path | None = next(umu_compat.joinpath(version).rglob(src), None) + if not subdir: + log.error("Could not find subdirectory '%s', skipping", subdir) + continue + + patchers.append(_apply_delta(subdir, content, thread_pool)) + renames.append((subdir, subdir.parent / content["target"])) + + # Wait for results and rename versioned subdirectories + start: float = time.time_ns() + for patcher in filter(None, patchers): + for future in filter(None, patcher.result()): + future.result() + + for rename in renames: + orig, new = rename + orig.rename(new) + log.debug("Update time (ns): %s", time.time_ns() - start) + + return env + + +def _apply_delta( + path: Path, + content: Content, + thread_pool: ThreadPoolExecutor, +) -> CustomPatcher | None: + patcher: CustomPatcher = CustomPatcher(content, path, thread_pool) + is_updated: bool = False + + # Verify the identity of the build. At this point the patch file is authenticated. + # Note, this will skip the update if the user had tinkered with their build. 
We do + # this so we can ensure the result of each binary patch isn't garbage + patcher.verify_integrity() + + for item in patcher.result(): + if item.result() is None: + is_updated = True + break + + if is_updated: + log.debug("%s (latest) validation failed, skipping", os.environ["PROTONPATH"]) + return None + + # Patch the current build, upgrading proton to the latest + log.info("%s is OK, applying partial update...", os.environ["PROTONPATH"]) - steam_compat.joinpath("UMU-Latest").unlink(missing_ok=True) - steam_compat.joinpath("UMU-Latest").symlink_to(tarball.removesuffix(".tar.gz")) - log.debug("Linking: UMU-Latest -> %s", tarball.removesuffix(".tar.gz")) + patcher.update_binaries() + patcher.add_binaries() + patcher.delete_binaries() - if future: - future.result() + return patcher diff --git a/umu/umu_run.py b/umu/umu_run.py index 4a4244a29..2a45fcdd7 100755 --- a/umu/umu_run.py +++ b/umu/umu_run.py @@ -132,15 +132,20 @@ def check_env( if os.environ.get("UMU_NO_PROTON") == "1": return env + path: Path = STEAM_COMPAT.joinpath(os.environ.get("PROTONPATH", "")) + if os.environ.get("PROTONPATH") and path.name == "UMU-Latest": + path.unlink(missing_ok=True) + # Proton Version - if ( - os.environ.get("PROTONPATH") - and Path(STEAM_COMPAT, os.environ["PROTONPATH"]).is_dir() - ): + if os.environ.get("PROTONPATH") and path.is_dir(): os.environ["PROTONPATH"] = str(STEAM_COMPAT.joinpath(os.environ["PROTONPATH"])) # GE-Proton - if os.environ.get("PROTONPATH") == "GE-Proton": + if os.environ.get("PROTONPATH") in { + "GE-Proton", + "GE-Latest", + "UMU-Latest", + }: get_umu_proton(env, session_pools) if "PROTONPATH" not in os.environ: diff --git a/umu/umu_test.py b/umu/umu_test.py index e1a5aa515..a23758ba6 100644 --- a/umu/umu_test.py +++ b/umu/umu_test.py @@ -8,6 +8,7 @@ from argparse import Namespace from array import array from concurrent.futures import ThreadPoolExecutor +from importlib.util import find_spec from pathlib import Path from pwd import getpwuid from shutil import copy, copytree, move, rmtree @@ -16,7 +17,6 @@ NamedTemporaryFile, TemporaryDirectory, TemporaryFile, - mkdtemp, ) from unittest.mock import MagicMock, Mock, patch @@ -85,6 +85,8 @@ def setUp(self): self.test_cache = Path("./tmp.5HYdpddgvs") # Steam compat dir self.test_compat = Path("./tmp.ZssGZoiNod") + # umu compat dir + self.test_umu_compat = Path("./tmp/tmp.tu692WxQHH") # umu-proton dir self.test_proton_dir = Path("UMU-Proton-5HYdpddgvs") # umu-proton release @@ -186,6 +188,270 @@ def tearDown(self): if self.test_cache_home.exists(): rmtree(self.test_cache_home.as_posix()) + if self.test_umu_compat.exists(): + rmtree(self.test_umu_compat.as_posix()) + + def test_get_delta_invalid_sig(self): + """Test get_delta when patch signature is invalid.""" + mock_assets = (("foo", "foo"), ("foo.tar.gz", "foo")) + os.environ["PROTONPATH"] = umu_proton.ProtonVersion.UMULatest.value + result = None + + # If either cbor2 or the Rust module DNE, skip + try: + from cbor2 import dumps + except ModuleNotFoundError: + err = "python3-cbor2 not installed" + self.skipTest(err) + + if find_spec("umu_delta") is None: + err = "umu_delta module not compiled" + self.skipTest(err) + + mock_patch = dumps( + {"public_key": "foo", "signature": b"bar", "contents": ["baz"]} + ) + mock_ctx = MagicMock() + mock_ctx.__enter__ = MagicMock(return_value=None) + mock_ctx.__exit__ = MagicMock(return_value=None) + + self.test_umu_compat.joinpath(os.environ["PROTONPATH"]).mkdir( + parents=True, exist_ok=True + ) + + self.test_umu_compat.joinpath( + 
os.environ["PROTONPATH"], "compatibilitytool.vdf" + ).touch(exist_ok=True) + + # When the value within the vdf file and GH asset Proton value differ, we update. + # Change the value here, to simulate a latest update scenario + self.test_umu_compat.joinpath( + os.environ["PROTONPATH"], "compatibilitytool.vdf" + ).write_text("bar") + + with ( + patch.object(umu_proton, "unix_flock", return_value=mock_ctx), + patch("umu.umu_delta.valid_key", lambda _: True), + ): + result = umu_proton._get_delta( + self.env, + self.test_umu_compat, + mock_patch, + mock_assets, + self.test_session_pools, + ) + + self.assertTrue(result is None, f"Expected None, received {result}") + + def test_get_delta_invalid_key(self): + """Test get_delta when public key is invalid.""" + mock_assets = (("foo", "foo"), ("foo.tar.gz", "foo")) + os.environ["PROTONPATH"] = umu_proton.ProtonVersion.UMULatest.value + result = None + + # If either cbor2 or the Rust module DNE, skip + try: + from cbor2 import dumps + except ModuleNotFoundError: + err = "python3-cbor2 not installed" + self.skipTest(err) + + if find_spec("umu_delta") is None: + err = "umu_delta module not compiled" + self.skipTest(err) + + mock_patch = dumps({"public_key": "foo"}) + mock_ctx = MagicMock() + mock_ctx.__enter__ = MagicMock(return_value=None) + mock_ctx.__exit__ = MagicMock(return_value=None) + + self.test_umu_compat.joinpath(os.environ["PROTONPATH"]).mkdir( + parents=True, exist_ok=True + ) + + self.test_umu_compat.joinpath( + os.environ["PROTONPATH"], "compatibilitytool.vdf" + ).touch(exist_ok=True) + + # When the value within the vdf file and GH asset Proton value differ, we update. + # Change the value here, to simulate a latest update scenario + self.test_umu_compat.joinpath( + os.environ["PROTONPATH"], "compatibilitytool.vdf" + ).write_text("bar") + + with patch.object(umu_proton, "unix_flock", return_value=mock_ctx): + result = umu_proton._get_delta( + self.env, + self.test_umu_compat, + mock_patch, + mock_assets, + self.test_session_pools, + ) + + self.assertTrue(result is None, f"Expected None, received {result}") + + def test_get_delta_check_update(self): + """Test get_delta when checking if latest is installed.""" + mock_assets = (("foo", "foo"), ("foo.tar.gz", "foo")) + os.environ["PROTONPATH"] = umu_proton.ProtonVersion.UMULatest.value + result = None + + # If either cbor2 or the Rust module DNE, skip + try: + from cbor2 import dumps + except ModuleNotFoundError: + err = "python3-cbor2 not installed" + self.skipTest(err) + + if find_spec("umu_delta") is None: + err = "umu_delta module not compiled" + self.skipTest(err) + + mock_patch = dumps({"foo": "foo"}) + mock_ctx = MagicMock() + mock_ctx.__enter__ = MagicMock(return_value=None) + mock_ctx.__exit__ = MagicMock(return_value=None) + + self.test_umu_compat.joinpath(os.environ["PROTONPATH"]).mkdir( + parents=True, exist_ok=True + ) + + self.test_umu_compat.joinpath( + os.environ["PROTONPATH"], "compatibilitytool.vdf" + ).touch(exist_ok=True) + + self.test_umu_compat.joinpath( + os.environ["PROTONPATH"], "compatibilitytool.vdf" + ).write_text("foo") + + with patch.object(umu_proton, "unix_flock", return_value=mock_ctx): + result = umu_proton._get_delta( + self.env, + self.test_umu_compat, + mock_patch, + mock_assets, + self.test_session_pools, + ) + + self.assertTrue(result is self.env, f"Expected None, received {result}") + + mock_val = str( + self.test_umu_compat.joinpath(umu_proton.ProtonVersion.UMULatest.value) + ) + self.assertEqual( + os.environ["PROTONPATH"], + mock_val, + f"Expected 
{mock_val}, received {os.environ['PROTONPATH']}", + ) + self.assertEqual( + self.env["PROTONPATH"], + mock_val, + f"Expected {mock_val}, received {self.env['PROTONPATH']}", + ) + + def test_get_delta_cbor(self): + """Test get_delta when parsing CBOR.""" + mock_assets = (("foo", "foo"), ("foo.tar.gz", "foo")) + os.environ["PROTONPATH"] = umu_proton.ProtonVersion.UMULatest.value + + # If either cbor2 or the Rust module DNE, skip + try: + from cbor2 import dumps + except ModuleNotFoundError: + err = "python3-cbor2 not installed" + self.skipTest(err) + + if find_spec("umu_delta") is None: + err = "umu_delta module not compiled" + self.skipTest(err) + + mock_patch = dumps({"foo": "foo"}) + mock_ctx = MagicMock() + mock_ctx.__enter__ = MagicMock(return_value=None) + mock_ctx.__exit__ = MagicMock(return_value=None) + + self.test_umu_compat.joinpath(os.environ["PROTONPATH"]).mkdir( + parents=True, exist_ok=True + ) + + with patch.object(umu_proton, "unix_flock", return_value=mock_ctx): + result = umu_proton._get_delta( + self.env, + self.test_umu_compat, + mock_patch, + mock_assets, + self.test_session_pools, + ) + self.assertTrue(result is None, f"Expected None, received {result}") + + def test_get_delta_cbor_err(self): + """Test get_delta when parsing invalid CBOR.""" + mock_patch = b"foo" + mock_assets = (("foo", "foo"), ("foo", "foo")) + os.environ["PROTONPATH"] = umu_proton.ProtonVersion.UMULatest.value + + if find_spec("cbor2") is None: + err = "umu_delta module not compiled" + self.skipTest(err) + + if find_spec("umu_delta") is None: + err = "umu_delta module not compiled" + self.skipTest(err) + + result = umu_proton._get_delta( + self.env, + self.test_umu_compat, + mock_patch, + mock_assets, + self.test_session_pools, + ) + self.assertTrue(result is None, f"Expected None, received {result}") + + def test_get_delta_no_latest(self): + """Test get_delta when parsing invalid CBOR.""" + mock_patch = b"foo" + mock_assets = (("foo", "foo"), ("foo", "foo")) + # Empty string is not a valid code name + os.environ["PROTONPATH"] = "" + + result = umu_proton._get_delta( + self.env, + self.test_umu_compat, + mock_patch, + mock_assets, + self.test_session_pools, + ) + self.assertTrue(result is None, f"Expected None, received {result}") + + def test_get_delta_no_patch(self): + """Test get_delta for empty or absent patch data.""" + mock_patch = b"" + mock_assets = (("foo", "foo"), ("foo", "foo")) + os.environ["PROTONPATH"] = umu_proton.ProtonVersion.UMULatest.value + + result = umu_proton._get_delta( + self.env, + self.test_umu_compat, + mock_patch, + mock_assets, + self.test_session_pools, + ) + self.assertTrue(result is None, f"Expected None, received {result}") + + def test_get_delta_no_assets(self): + """Test get_delta when no GH assets are returned.""" + mock_patch = b"" + mock_assets = () + + result = umu_proton._get_delta( + self.env, + self.test_umu_compat, + mock_patch, + mock_assets, + self.test_session_pools, + ) + self.assertTrue(result is None, f"Expected None, received {result}") + def test_main_nomusl(self): """Test __main__.main to ensure an exit when on a musl-based system.""" os.environ["LD_LIBRARY_PATH"] = f"{os.environ['LD_LIBRARY_PATH']}:musl" @@ -1043,31 +1309,6 @@ def test_fetch_releases(self): f"Expected tuple with len, received len {result_len}", ) - def test_update_proton(self): - """Test _update_proton.""" - mock_protons = [Path(mkdtemp()), Path(mkdtemp())] - thread_pool = ThreadPoolExecutor() - result = [] - - for mock in mock_protons: - self.assertTrue(mock.is_dir(), 
f"Directory '{mock}' does not exist") - - result = umu_proton._update_proton(mock_protons, thread_pool) - - self.assertTrue(result is None, f"Expected None, received '{result}'") - - # The directories should be removed after the update - for mock in mock_protons: - self.assertFalse(mock.is_dir(), f"Directory '{mock}' still exist") - - def test_update_proton_empty(self): - """Test _update_proton when passed an empty list.""" - # In the real usage, an empty list means that there were no - # UMU/ULWGL-Proton found in compatibilitytools.d - result = umu_proton._update_proton([], None) - - self.assertTrue(result is None, "Expected None when passed an empty list") - def test_ge_proton(self): """Test check_env when the code name GE-Proton is set for PROTONPATH. @@ -1081,7 +1322,7 @@ def test_ge_proton(self): self.assertRaises(FileNotFoundError), patch.object(umu_proton, "_fetch_releases", return_value=None), patch.object(umu_proton, "_get_latest", return_value=None), - patch.object(umu_proton, "_get_from_steamcompat", return_value=None), + patch.object(umu_proton, "_get_from_compat", return_value=None), ): os.environ["WINEPREFIX"] = self.test_file os.environ["GAMEID"] = self.test_file @@ -1103,17 +1344,17 @@ def test_ge_proton_none(self): Tests the case when the user has no internet connection or GE-Proton wasn't found in local system. """ + mock_session_pools = (MagicMock(), MagicMock()) with ( self.assertRaises(FileNotFoundError), patch.object(umu_proton, "_fetch_releases", return_value=None), patch.object(umu_proton, "_get_latest", return_value=None), - patch.object(umu_proton, "_get_from_steamcompat", return_value=None), - ThreadPoolExecutor() as thread_pool, + patch.object(umu_proton, "_get_from_compat", return_value=None), ): os.environ["WINEPREFIX"] = self.test_file os.environ["GAMEID"] = self.test_file os.environ["PROTONPATH"] = "GE-Proton" - umu_run.check_env(self.env, thread_pool) + umu_run.check_env(self.env, mock_session_pools) self.assertFalse(os.environ.get("PROTONPATH"), "Expected empty string") def test_latest_interrupt(self): @@ -1130,6 +1371,7 @@ def test_latest_interrupt(self): # In this case, assume the test variable will be downloaded files = (("", ""), (self.test_archive.name, "")) tmpdirs = (self.test_cache, self.test_cache_home) + compats = (self.test_umu_compat, self.test_compat) # Mock the context manager object that creates the file lock mock_ctx = MagicMock() @@ -1146,7 +1388,7 @@ def test_latest_interrupt(self): mock_function.side_effect = KeyboardInterrupt result = umu_proton._get_latest( self.env, - self.test_compat, + compats, tmpdirs, files, self.test_session_pools, @@ -1169,6 +1411,7 @@ def test_latest_val_err(self): # internet) files = (("", ""), (self.test_archive.name, "")) tmpdirs = (self.test_cache, self.test_cache_home) + compats = (self.test_umu_compat, self.test_compat) # Mock the context manager object that creates the file lock mock_ctx = MagicMock() @@ -1189,7 +1432,7 @@ def test_latest_val_err(self): mock_function.side_effect = ValueError result = umu_proton._get_latest( self.env, - self.test_compat, + compats, tmpdirs, files, self.test_session_pools, @@ -1206,6 +1449,7 @@ def test_latest_offline(self): # internet) files = () tmpdirs = (self.test_cache, self.test_cache_home) + compats = (self.test_umu_compat, self.test_compat) # Mock the context manager object that creates the file lock mock_ctx = MagicMock() @@ -1221,7 +1465,7 @@ def test_latest_offline(self): ): result = umu_proton._get_latest( self.env, - self.test_compat, + compats, tmpdirs, 
files, self.test_session_pools, @@ -1229,75 +1473,6 @@ def test_latest_offline(self): self.assertFalse(self.env["PROTONPATH"], "Expected PROTONPATH to be empty") self.assertFalse(result, "Expected None to be returned from _get_latest") - def test_link_umu(self): - """Test _get_latest for recreating the UMU-Latest link. - - This link should always be recreated to ensure clients can reliably - kill the wineserver process for the current prefix - - In the real usage, this will fail if the user already has a UMU-Latest - directory for some reason or the link somehow gets deleted after it - gets recreated by the launcher - """ - result = None - latest = Path("UMU-Proton-9.0-beta15") - latest.mkdir() - Path(f"{latest}.sha512sum").touch() - files = ((f"{latest}.sha512sum", ""), (f"{latest}.tar.gz", "")) - tmpdirs = (self.test_cache, self.test_cache_home) - - # Mock the context manager object that creates the file lock - mock_ctx = MagicMock() - mock_ctx.__enter__ = MagicMock(return_value=None) - mock_ctx.__exit__ = MagicMock(return_value=None) - - # Mock the latest Proton in /tmp - test_archive = self.test_cache.joinpath(f"{latest}.tar.gz") - with tarfile.open(test_archive.as_posix(), "w:gz") as tar: - tar.add(latest.as_posix(), arcname=latest.as_posix()) - - # UMU-Latest will not exist in this installation - self.test_compat.joinpath("UMU-Proton-9.0-beta15").mkdir() - - os.environ["PROTONPATH"] = "" - - self.assertFalse( - self.test_compat.joinpath("UMU-Latest").exists(), - "Expected UMU-Latest link to not exist", - ) - with ( - patch("umu.umu_proton._fetch_proton"), - ThreadPoolExecutor(), - patch.object(umu_proton, "unix_flock", return_value=mock_ctx), - ): - result = umu_proton._get_latest( - self.env, - self.test_compat, - tmpdirs, - files, - self.test_session_pools, - ) - self.assertTrue(result is self.env, "Expected the same reference") - # Verify the latest was set - self.assertEqual( - self.env.get("PROTONPATH"), - self.test_compat.joinpath(latest).as_posix(), - "Expected latest to be set", - ) - self.assertTrue( - self.test_compat.joinpath("UMU-Latest").is_symlink(), - "Expected UMU-Latest symlink", - ) - # Verify link - self.assertEqual( - self.test_compat.joinpath("UMU-Latest").readlink(), - latest, - f"Expected UMU-Latest link to be ./{latest}", - ) - - latest.rmdir() - Path(f"{latest}.sha512sum").unlink() - def test_latest_umu(self): """Test _get_latest when online and when an empty PROTONPATH is set. 
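# A condensed sketch of the verification path exercised by the tests above and
# implemented in umu_proton._get_delta: decode the CBOR patch, check the
# embedded public key's digest against the whitelist, then verify the digital
# signature over the canonical CBOR encoding of "contents" before any binary
# deltas are applied. The wrapper name verify_patch is illustrative only;
# valid_key and valid_signature come from the compiled umu_delta extension.
from cbor2 import dumps, loads

from umu.umu_delta import valid_key, valid_signature


def verify_patch(patch: bytes) -> dict | None:
    """Return the decoded patch container only if its key and signature check out."""
    container = loads(patch)
    public_key, _ = container["public_key"]
    if not valid_key(public_key):
        # Digest not whitelisted, or the patch was produced with a different key
        return None
    signature, _ = container["signature"]
    signed_payload = dumps(container["contents"], canonical=True)
    if not valid_signature(public_key, signed_payload, signature):
        # Contents were altered or signed by an unknown key
        return None
    return container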
@@ -1310,6 +1485,7 @@ def test_latest_umu(self): Path(f"{latest}.sha512sum").touch() files = ((f"{latest}.sha512sum", ""), (f"{latest}.tar.gz", "")) tmpdirs = (self.test_cache, self.test_cache_home) + compats = (self.test_umu_compat, self.test_compat) # Mock the context manager object that creates the file lock mock_ctx = MagicMock() @@ -1343,7 +1519,7 @@ def test_latest_umu(self): ): result = umu_proton._get_latest( self.env, - self.test_compat, + compats, tmpdirs, files, (thread_pool, MagicMock()), @@ -1355,19 +1531,6 @@ def test_latest_umu(self): self.test_compat.joinpath(latest).as_posix(), "Expected latest to be set", ) - # Verify that the old versions were deleted - self.assertFalse( - self.test_compat.joinpath("UMU-Proton-9.0-beta15").exists(), - "Expected old version to be removed", - ) - self.assertFalse( - self.test_compat.joinpath("UMU-Proton-9.0-beta14").exists(), - "Expected old version to be removed", - ) - self.assertFalse( - self.test_compat.joinpath("ULWGL-Proton-8.0-5-2").exists(), - "Expected old version to be removed", - ) # Verify foo files survived self.assertTrue( self.test_compat.joinpath("foo").exists(), @@ -1377,35 +1540,27 @@ def test_latest_umu(self): self.test_compat.joinpath("GE-Proton9-2").exists(), "Expected GE-Proton9-2 to survive", ) - self.assertTrue( - self.test_compat.joinpath("UMU-Latest").is_symlink(), - "Expected UMU-Latest symlink", - ) - # Verify link - self.assertEqual( - self.test_compat.joinpath("UMU-Latest").readlink(), - latest, - f"Expected UMU-Latest link to be ./{latest}", - ) latest.rmdir() Path(f"{latest}.sha512sum").unlink() def test_steamcompat_nodir(self): - """Test _get_from_steamcompat when Proton doesn't exist in compat dir. + """Test _get_from_compat when Proton doesn't exist in compat dir. In this case, None should be returned to signal that we should continue with downloading the latest Proton """ result = None - result = umu_proton._get_from_steamcompat(self.env, self.test_compat) + result = umu_proton._get_from_compat( + self.env, (self.test_umu_compat, self.test_compat) + ) - self.assertFalse(result, "Expected None after calling _get_from_steamcompat") + self.assertFalse(result, "Expected None after calling _get_from_compat") self.assertFalse(self.env["PROTONPATH"], "Expected PROTONPATH to not be set") def test_steamcompat(self): - """Test _get_from_steamcompat. + """Test _get_from_compat. 
When a Proton exist in .local/share/Steam/compatibilitytools.d, use it when PROTONPATH is unset @@ -1415,7 +1570,9 @@ def test_steamcompat(self): umu_util.extract_tarfile(self.test_archive, self.test_archive.parent) move(str(self.test_archive).removesuffix(".tar.gz"), self.test_compat) - result = umu_proton._get_from_steamcompat(self.env, self.test_compat) + result = umu_proton._get_from_compat( + self.env, (self.test_umu_compat, self.test_compat) + ) self.assertTrue(result is self.env, "Expected the same reference") self.assertEqual( diff --git a/umu/umu_util.py b/umu/umu_util.py index 35b22c017..24e174581 100644 --- a/umu/umu_util.py +++ b/umu/umu_util.py @@ -1,17 +1,19 @@ import os import sys +from collections.abc import Generator from contextlib import contextmanager from ctypes.util import find_library from fcntl import LOCK_EX, LOCK_UN, flock from functools import lru_cache from hashlib import new as hashnew -from io import BufferedIOBase +from io import BufferedIOBase, BufferedRandom from pathlib import Path from re import Pattern from re import compile as re_compile from shutil import which from subprocess import PIPE, STDOUT, Popen, TimeoutExpired from tarfile import open as taropen +from typing import Any from urllib3.response import BaseHTTPResponse from Xlib import display @@ -36,6 +38,21 @@ def unix_flock(path: str): os.close(fd) +@contextmanager +def memfdfile(name: str) -> Generator[BufferedRandom, Any, None]: + """Create an anonymous file.""" + fp: BufferedRandom | None = None + + try: + fd = os.memfd_create(name, os.MFD_CLOEXEC) + os.set_inheritable(fd, True) + fp = os.fdopen(fd, mode="rb+") + yield fp + finally: + if fp is not None: + fp.close() + + @lru_cache def get_libc() -> str: """Find libc.so from the user's system."""
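# Minimal usage sketch for the memfdfile() helper added above (illustrative,
# not part of the patch): the anonymous, memory-backed file behaves like an
# ordinary binary file object and is released when the context manager closes
# the descriptor.
from umu.umu_util import memfdfile

with memfdfile("umu-scratch") as fp:
    fp.write(b"payload held only in memory")  # e.g., a downloaded archive chunk
    fp.flush()
    fp.seek(0)
    assert fp.read() == b"payload held only in memory"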