From 8f940c9e87177b50793a92a2fa82d16c897ad509 Mon Sep 17 00:00:00 2001 From: Daniel Richard G Date: Tue, 28 Nov 2023 23:22:56 -0500 Subject: [PATCH] WIP: Tooling to cross-compile Chromium for Windows from Linux --- .github/workflows/build-images.yml | 133 ++++++ .github/workflows/cross-build.yml | 229 +++++++++ cross-build/Dockerfile.base | 33 ++ cross-build/Dockerfile.msvc | 18 + cross-build/Makefile | 295 ++++++++++++ cross-build/README.md | 84 ++++ cross-build/base-setup.sh | 172 +++++++ cross-build/build.sh | 439 ++++++++++++++++++ cross-build/case-fold.sh | 317 +++++++++++++ cross-build/ccache.conf | 9 + cross-build/gen-setenv.py | 168 +++++++ cross-build/gh-unburden.sh | 41 ++ cross-build/rootfs-sums.sh | 47 ++ flags.windows.gn | 1 + package.py | 21 +- patches/series | 3 + .../windows-compile-mini-installer.patch | 18 +- .../windows/windows-disable-rcpy.patch | 20 +- .../windows-disable-win-build-output.patch | 19 +- .../windows-fix-bindgen-libclang-path.patch | 26 ++ .../windows/windows-fix-building-gn.patch | 30 +- ...s-fix-generate-resource-allowed-list.patch | 21 +- .../windows-fix-licenses-gn-path.patch | 15 +- .../windows/windows-run-midl-via-wine.patch | 93 ++++ .../windows/windows-use-system-tools.patch | 83 ++++ skunk-tmp/ghci-strategy.sh | 229 +++++++++ skunk-tmp/ninja-compdb-extract.py | 64 +++ 27 files changed, 2578 insertions(+), 50 deletions(-) create mode 100644 .github/workflows/build-images.yml create mode 100644 .github/workflows/cross-build.yml create mode 100644 cross-build/Dockerfile.base create mode 100644 cross-build/Dockerfile.msvc create mode 100644 cross-build/Makefile create mode 100644 cross-build/README.md create mode 100644 cross-build/base-setup.sh create mode 100755 cross-build/build.sh create mode 100755 cross-build/case-fold.sh create mode 100644 cross-build/ccache.conf create mode 100755 cross-build/gen-setenv.py create mode 100755 cross-build/gh-unburden.sh create mode 100755 cross-build/rootfs-sums.sh create mode 100644 patches/ungoogled-chromium/windows/windows-fix-bindgen-libclang-path.patch create mode 100644 patches/ungoogled-chromium/windows/windows-run-midl-via-wine.patch create mode 100644 patches/ungoogled-chromium/windows/windows-use-system-tools.patch create mode 100755 skunk-tmp/ghci-strategy.sh create mode 100755 skunk-tmp/ninja-compdb-extract.py diff --git a/.github/workflows/build-images.yml b/.github/workflows/build-images.yml new file mode 100644 index 00000000..7662069b --- /dev/null +++ b/.github/workflows/build-images.yml @@ -0,0 +1,133 @@ +name: Build container images for cross build + +on: + workflow_dispatch: + inputs: + upload: + type: boolean + description: "Upload to registry" + default: false + +env: + # used in cross-build/Makefile + APT_MIRROR: azure.archive.ubuntu.com + IMAGE_SOURCE: ${{github.server_url}}/${{github.repository}} + +jobs: + main: + runs-on: ubuntu-24.04 + permissions: + # needed to upload container images + packages: write + steps: + + - name: Clone u-c-w Git repository + uses: actions/checkout@v4 + + - name: Clone msvc-wine Git repository + uses: actions/checkout@v4 + with: + repository: mstorsjo/msvc-wine + ref: 209623ed118aac0121e63a7e86e467c238516f5a # 20240912 + path: cross-build/msvc-wine + + - name: Free up disk space + run: cd / && sudo $GITHUB_WORKSPACE/cross-build/gh-unburden.sh + + - name: Adjust APT config + run: | + sudo tee /etc/apt/apt.conf.d/95custom << END + # Don't install recommended packages + APT::Install-Recommends "0"; + # Don't use "Reading database ... 
X%" progress indicator + Dpkg::Use-Pty "false"; + END + + - name: Build base container image + run: | + cd cross-build + make build-image-base \ + BUILD_UID=$(id -u) \ + MULTI_ARCH=1 + + - name: Install packages required for extracting the MSVC files + run: sudo apt-get -y install msitools + + - name: Restore MSVC download cache + id: restore-msvc + uses: actions/cache/restore@v4 + with: + key: msvc-download + path: cross-build/msvc-cache + + - name: Build MSVC container image + run: | + cd cross-build + make build-image \ + MSVC_ACCEPT_LICENSE=--accept-license \ + MULTI_ARCH=1 + + - name: Save MSVC download cache + if: ${{!steps.restore-msvc.outputs.cache-hit}} + uses: actions/cache/save@v4 + with: + key: msvc-download + path: cross-build/msvc-cache + + - name: Get date-based version tag for images + id: version + run: | + vtag=$(date '+%Y%m%d') + echo "Image version tag: $vtag" + echo "tag=$vtag" >> $GITHUB_OUTPUT + + - name: Log in to GitHub Container Registry + if: inputs.upload + env: + GITHUB_ACTOR: ${{github.actor}} + GITHUB_TOKEN: ${{github.token}} + run: docker login ghcr.io --username $GITHUB_ACTOR --password-stdin <<<$GITHUB_TOKEN + + # Note: Ensure that the GitHub repo has "Role: Write" access to + # chromium-win-cross{,-base} under "Package settings -> Manage + # Actions access", or else the "docker push" operation will fail. + + - name: Upload base container image to registry + if: inputs.upload + run: | + remote_name=ghcr.io/${{github.repository_owner}}/chromium-win-cross-base + set -x + docker tag chromium-win-cross-base $remote_name:${{steps.version.outputs.tag}} + docker tag chromium-win-cross-base $remote_name:latest + docker push $remote_name:${{steps.version.outputs.tag}} + docker push $remote_name:latest + + - name: Upload MSVC container image to registry + if: inputs.upload + run: | + remote_name=ghcr.io/${{github.repository_owner}}/chromium-win-cross + set -x + docker tag chromium-win-cross $remote_name:${{steps.version.outputs.tag}} + docker tag chromium-win-cross $remote_name:latest + docker push $remote_name:${{steps.version.outputs.tag}} + docker push $remote_name:latest + docker logout ghcr.io + + - name: Prepare image metadata + run: | + mkdir artifact + set -x + cp -p cross-build/MD5SUMS.rootfs artifact/ + cp -p cross-build/winsysroot/.vsdownload/MD5SUMS.cache artifact/MD5SUMS.msvc-cache + cp -p cross-build/winsysroot/.vsdownload/*.manifest.xz artifact/ + xz -d artifact/*.xz + docker container run --rm chromium-win-cross dpkg-query --show > artifact/dpkg-packages.txt + + - name: Archive image metadata + uses: actions/upload-artifact@v4 + with: + name: image-info + compression-level: 9 + path: artifact/ + +# EOF diff --git a/.github/workflows/cross-build.yml b/.github/workflows/cross-build.yml new file mode 100644 index 00000000..51511774 --- /dev/null +++ b/.github/workflows/cross-build.yml @@ -0,0 +1,229 @@ +--- +name: Cross-build ungoogled-chromium + +on: + workflow_dispatch: + inputs: + container_image: + description: Container image for build + default: chromium-win-cross:latest + target-cpu: + description: Target CPU + type: choice + options: [x64, x86, arm64] + default: x64 + debug: + description: Enable debugging + type: boolean + default: false + +env: + ZSTD_NBTHREADS: 0 + +jobs: + + stage-1: + runs-on: ubuntu-24.04 + container: + image: ghcr.io/${{github.repository_owner}}/${{inputs.container_image}} + options: -v /:/HOST + steps: + + - name: Free up disk space + run: cd /HOST && sudo /usr/local/sbin/gh-unburden + + - name: Clone u-c-w Git 
repository + uses: actions/checkout@v4 + with: + submodules: recursive + fetch-depth: 50 + fetch-tags: true + + - name: Get Chromium version info + id: chromium + run: | + version=$(cat ungoogled-chromium/chromium_version.txt) + echo "Chromium version: $version" + echo "version=$version" >> $GITHUB_OUTPUT + + - name: Restore Chromium source tarball download cache + id: restore-cache + uses: actions/cache/restore@v4 + with: + key: chromium-source-${{steps.chromium.outputs.version}} + path: cross-build/build/download_cache + + - name: Download and/or verify Chromium source tarball + run: | + cache=cross-build/build/download_cache + mkdir -p $cache + ungoogled-chromium/utils/downloads.py retrieve \ + --ini ungoogled-chromium/downloads.ini \ + --cache $cache \ + --hide-progress-bar + ls -l $cache + + - name: Save Chromium source tarball download cache + if: ${{!steps.restore-cache.outputs.cache-hit}} + uses: actions/cache/save@v4 + with: + key: chromium-source-${{steps.chromium.outputs.version}} + path: cross-build/build/download_cache + + - name: Prepare the build + run: cd cross-build && ./build.sh --ci --${{inputs.target-cpu}} ${{inputs.debug && '--debug' || ''}} + + - name: Prepare GHCI build strategy + run: | + cd cross-build/build/src/out/Default* + targets=$(cat build.targets) + $GITHUB_WORKSPACE/skunk-tmp/ghci-strategy.sh 8 $targets + + - name: Delete the download cache + run: rm -r cross-build/build/download_cache + + - name: Stage 1 build (${{inputs.target-cpu}}) + run: | + cd cross-build/build/src + ninja -C out/Default* -f ghci-stage1.ninja ghci-stage1 + + - name: Tar up the workspace + # The "touch" prevents a "tar: .: file changed as we read it" error + run: | + touch stage1.tar.zstd + tar cf stage1.tar.zstd --zstd --exclude=stage1.tar.zstd . 
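+          # ZSTD_NBTHREADS=0 in the workflow env is presumably meant to have
+          # zstd size its worker-thread pool from the detected cores (as the
+          # -T0 option does), so this compression step can use the whole runner.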
+ ls -lh stage1.tar.zstd + + - name: Save the workspace for stage 2 + uses: actions/upload-artifact@v4 + with: + name: stage1 + compression-level: 0 + path: stage1.tar.zstd + if-no-files-found: error + retention-days: 1 + + stage-2: + needs: [stage-1] + runs-on: ubuntu-24.04 + strategy: + matrix: + part: [part1, part2, part3, part4, part5, part6, part7, part8] + container: + image: ghcr.io/${{github.repository_owner}}/${{inputs.container_image}} + options: -v /:/HOST + env: + PART: ${{matrix.part}} + steps: + + - name: Free up disk space + run: cd /HOST && sudo /usr/local/sbin/gh-unburden + + - name: Download workspace tarball from stage 1 + uses: actions/download-artifact@v4 + with: + name: stage1 + + # The find/truncate gets rid of the bulk of redundant files under + # obj/, while keeping them as placeholders to avoid recompilation + - name: Unpack workspace tarball + run: | + tar xf stage1.tar.zstd --zstd + rm stage1.tar.zstd + find cross-build/build/src/out/Default*/obj \ + -type f -exec truncate -s 0 {} + + + - name: Stage 2 build (${{matrix.part}}, ${{inputs.target-cpu}}) + run: | + cd cross-build/build/src + ninja -C out/Default* -f ghci-stage2.ninja $PART + + - name: Tar up the partial build tree + run: | + tar cf stage2-$PART.tar.zstd --zstd cross-build/build/src/out/Default*/obj + ls -lh stage2-$PART.tar.zstd + + - name: Save the partial tree for stage 3 + uses: actions/upload-artifact@v4 + with: + name: stage2-${{matrix.part}} + compression-level: 0 + path: stage2-${{matrix.part}}.tar.zstd + if-no-files-found: error + retention-days: 1 + + stage-3: + needs: [stage-2] + runs-on: ubuntu-24.04 + container: + image: ghcr.io/${{github.repository_owner}}/${{inputs.container_image}} + options: -v /:/HOST + steps: + + - name: Free up disk space + run: cd /HOST && sudo /usr/local/sbin/gh-unburden + + - name: Download the workspace and partial build tree tarballs + uses: actions/download-artifact@v4 + + - name: Unpack the tarballs + run: | + for tarball in \ + stage1/stage1.tar.zstd \ + stage2-part*/stage2-part*.tar.zstd + do + echo "Unpacking $tarball ..." + tar xf $tarball --zstd --skip-old-files + rm $tarball + done + rmdir stage1 stage2-part* + + - name: Stage 3 build (${{inputs.target-cpu}}) + run: cd cross-build && ./build.sh --${{inputs.target-cpu}} ${{inputs.debug && '--debug' || ''}} + + - name: Archive build outputs + uses: actions/upload-artifact@v4 + with: + name: packages-${{inputs.target-cpu}} + compression-level: 0 + path: cross-build/build/ungoogled-chromium_* + if-no-files-found: error + + - name: Archive reproducibility info + uses: actions/upload-artifact@v4 + with: + name: reproduce + compression-level: 9 + path: cross-build/build/MD5SUMS*.repro + if-no-files-found: error + + cleanup: + if: always() + needs: [stage-3] + runs-on: ubuntu-24.04 + permissions: + actions: write + steps: + - name: Delete temporary artifacts + env: + GH_TOKEN: ${{github.token}} + run: | + gh_api_call() + { + gh api $2 $3 \ + -H 'Accept: application/vnd.github+json' \ + -H 'X-GitHub-Api-Version: 2022-11-28' \ + "/repos/$GITHUB_REPOSITORY/actions/$1" + } + gh_api_call "runs/$GITHUB_RUN_ID/artifacts" \ + | jq -r '.artifacts[] | (.id|tostring)+"\t"+.name' \ + | grep stage \ + > artifacts.txt || true + echo "Found $(wc -l < artifacts.txt) artifact(s) to delete." 
+ while read id name + do + echo "Deleting artifact \"$name\" (id=$id)" + gh_api_call "artifacts/$id" --method DELETE + done < artifacts.txt + +# EOF diff --git a/cross-build/Dockerfile.base b/cross-build/Dockerfile.base new file mode 100644 index 00000000..1c7a439b --- /dev/null +++ b/cross-build/Dockerfile.base @@ -0,0 +1,33 @@ +# Dockerfile.base + +FROM ubuntu:noble + +LABEL org.opencontainers.image.description="Base image for building chromium-win-cross" + +ARG APT_MIRROR=mirrors.wikimedia.org +ARG BUILD_UID=1024 +ARG MULTI_ARCH= + +ADD ccache.conf /etc/ +ADD gh-unburden.sh /usr/local/sbin/gh-unburden +ADD rootfs-sums.sh /usr/local/sbin/rootfs-sums +ADD rc.cc /usr/local/src/ + +ADD llvm /opt/llvm + +ADD rust /opt/rust + +# Most setup is performed in this script +RUN --mount=type=bind,source=base-setup.sh,target=/tmp/base-setup.sh \ + sh /tmp/base-setup.sh + +# Prevents "error: the option `Z` is only accepted on the nightly compiler" +ENV RUSTC_BOOTSTRAP=1 + +WORKDIR /home/build +USER build + +# Avoid .pyc files as they don't help reproducibility +ENV PYTHONDONTWRITEBYTECODE=y + +# end Dockerfile.base diff --git a/cross-build/Dockerfile.msvc b/cross-build/Dockerfile.msvc new file mode 100644 index 00000000..8d4f82ed --- /dev/null +++ b/cross-build/Dockerfile.msvc @@ -0,0 +1,18 @@ +# Dockerfile.msvc + +# This adds the MSVC toolchain to the base image. Because MSVC is +# proprietary software, the resulting image cannot be redistributed. +# Please ensure that the image remains private in whichever container +# registry it is uploaded to. + +FROM chromium-win-cross-base +#FROM ghcr.io/ungoogled-software/chromium-win-cross-base + +LABEL org.opencontainers.image.description="Chromium browser cross-build environment (Linux to Microsoft Windows)" + +ADD winsysroot /opt/microsoft + +ENV GYP_MSVS_OVERRIDE_PATH=/opt/microsoft \ + WINDOWSSDKDIR="/opt/microsoft/Windows Kits/10" + +# end Dockerfile.msvc diff --git a/cross-build/Makefile b/cross-build/Makefile new file mode 100644 index 00000000..855edcc9 --- /dev/null +++ b/cross-build/Makefile @@ -0,0 +1,295 @@ +# Makefile + +IMAGE_NAME = chromium-win-cross +BASE_IMAGE_NAME = $(IMAGE_NAME)-base +CONTAINER_NAME = $(IMAGE_NAME)-con + +# Uncomment to add support for x86 (32-bit) and ARM64 builds +#MULTI_ARCH = 1 + +ifdef GITHUB_WORKSPACE +GH_ECHO = @echo +else +GH_ECHO = @true +endif + +extra_run_args = \ + --network=host \ + --tmpfs /external/tmp:exec + +run: + docker container run -it --rm \ + --name=$(CONTAINER_NAME) \ + --hostname=$(CONTAINER_NAME) \ + $(extra_run_args) \ + $(IMAGE_NAME) + +run-extra: + docker container exec -it $(CONTAINER_NAME) /bin/bash + +CONTEXT = tmp.context + +build-image: Dockerfile.msvc winsysroot.stamp + $(GH_ECHO) '::group::Build container image' + rm -rf $(CONTEXT) + mkdir $(CONTEXT) + cp -al winsysroot/ $(CONTEXT) + docker build \ + --file $< \ + --tag $(IMAGE_NAME) \ + --progress plain \ + $(if $(IMAGE_SOURCE),--label org.opencontainers.image.source=$(IMAGE_SOURCE)) \ + $(CONTEXT) + rm -rf $(CONTEXT) + docker container run --rm \ + --user=root \ + --cap-add=SYS_ADMIN \ + --security-opt=apparmor=unconfined \ + $(IMAGE_NAME) \ + rootfs-sums generate \ + > MD5SUMS.rootfs + $(GH_ECHO) '::endgroup::' + +image_base_deps = \ + base-setup.sh \ + ccache.conf \ + gh-unburden.sh \ + rc.cc \ + rootfs-sums.sh + +build-image-base: Dockerfile.base llvm.stamp rust.stamp $(image_base_deps) + $(GH_ECHO) '::group::Build base container image' + rm -rf $(CONTEXT) + mkdir $(CONTEXT) + cp -p $(image_base_deps) $(CONTEXT) + cp -al llvm/ 
$(CONTEXT) + cp -al rust/ $(CONTEXT) + docker build \ + --file $< \ + --tag $(BASE_IMAGE_NAME) \ + --progress plain \ + $(if $(IMAGE_SOURCE),--label org.opencontainers.image.source=$(IMAGE_SOURCE)) \ + $(if $(APT_MIRROR),--build-arg APT_MIRROR=$(APT_MIRROR)) \ + $(if $(BUILD_UID),--build-arg BUILD_UID=$(BUILD_UID)) \ + $(if $(MULTI_ARCH),--build-arg MULTI_ARCH=$(MULTI_ARCH)) \ + $(CONTEXT) + rm -rf $(CONTEXT) + $(GH_ECHO) '::endgroup::' + +verify-image: MD5SUMS.rootfs + docker container run -i --rm \ + --user=root \ + --cap-add=SYS_ADMIN \ + --security-opt=apparmor:unconfined \ + $(IMAGE_NAME) \ + rootfs-sums verify \ + < $< + +#### Google tools + +# Commit from 2024 Oct 14 +RC_GIT_REVISION = 4156984e47724ae6a522c9e7af3d5fbb97a876f1 +#RC_GIT_REVISION = main + +# Portable resource compiler re-implementation, see +# build/toolchain/win/rc/README.md +# (llvm-rc does not yet appear to be usable for the Chromium build) +# +# View source in context: +# https://github.com/nico/hack/blob/main/res/rc.cc +# +rc.cc: + $(GH_ECHO) '::group::Download Google RC' + wget -nv https://raw.githubusercontent.com/nico/hack/$(RC_GIT_REVISION)/res/rc.cc + test -s $@ + $(GH_ECHO) '::endgroup::' + +#### LLVM toolchain/libraries setup + +LLVM_VERSION = 19.1.2 + +LLVM_URL = https://github.com/llvm/llvm-project/releases/download/llvmorg-$(LLVM_VERSION)/LLVM-$(LLVM_VERSION)-Linux-X64.tar.xz + +llvm.stamp: + $(GH_ECHO) '::group::Prepare LLVM installation' + wget -nv -c $(LLVM_URL) + rm -rf llvm llvm-tmp + mkdir llvm-tmp + tar xJf LLVM-*.tar.xz -C llvm-tmp + mv -v llvm-tmp/LLVM-* llvm + rm -r llvm-tmp +# Always link the C++ runtime libraries statically + for lib in llvm/lib/*/libc++.so; do \ + grep -Iq '^INPUT' $$lib || exit; \ + mv -f $$lib $$lib.orig; \ + echo 'INPUT(libc++.a libc++abi.a libunwind.a)' > $$lib; \ + echo "rewrote '$$lib'"; \ + done + touch $@ + $(GH_ECHO) '::endgroup::' + +#### Rust toolchain/libraries setup + +# Debian/Ubuntu don't package the Rust standard libraries needed for +# cross-compilation to Windows/MSVC, and the packaged Rust compiler will +# reject any libraries we download with error E0514, so install the whole +# Rust toolchain and set of libraries using the "rustup" utility. 
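+#
+# Once the rust.stamp rule below has run, the resulting toolchain can be
+# spot-checked with commands along these lines (illustrative only; the
+# paths follow the layout created by that rule):
+#
+#   rust/sysroot/bin/rustc --version
+#   ls rust/sysroot/lib/rustlib/        # should list *-pc-windows-msvc target dirs
+#   rust/bindgen/bin/bindgen --version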
+ +RUST_VERSION = 1.84.0 +#RUST_VERSION = nightly-2024-02-14 + +BINDGEN_VERSION = 0.71.1 + +RUSTUP_URL = https://static.rust-lang.org/rustup/dist/x86_64-unknown-linux-gnu/rustup-init + +.SECONDARY: rustup-init +rustup-init: + $(GH_ECHO) '::group::Download rustup' + wget -nv -c $(RUSTUP_URL) + chmod +x $@ + $(GH_ECHO) '::endgroup::' + +rust.stamp: rustup-init + $(GH_ECHO) '::group::Prepare Rust installation' + rm -rf rust rust-cargo.tmp + CARGO_HOME=rust/cargo \ + RUSTUP_HOME=rust/rustup \ + TERM=dumb \ + ./rustup-init -y --quiet \ + --profile minimal \ + --default-toolchain $(RUST_VERSION) \ + $(if $(MULTI_ARCH),--target i686-pc-windows-msvc) \ + $(if $(MULTI_ARCH),--target aarch64-pc-windows-msvc) \ + --target x86_64-pc-windows-msvc \ + --no-modify-path + cd rust && ln -s rustup/toolchains/* sysroot + CARGO_HOME=rust-cargo.tmp \ + RUSTC=rust/sysroot/bin/rustc \ + TERM=dumb \ + rust/sysroot/bin/cargo install --root rust/bindgen bindgen-cli@$(BINDGEN_VERSION) + rm -rf rust-cargo.tmp + touch $@ + $(GH_ECHO) '::endgroup::' + +#### Microsoft Windows SDK setup + +# Uncomment to accept the Microsoft EULA automatically +#MSVC_ACCEPT_LICENSE = --accept-license + +MSVC_PACKAGE_LIST = \ + Microsoft.VisualStudio.Component.VC.14.38.17.8.x86.x64 \ + Microsoft.VisualStudio.Component.VC.14.38.17.8.MFC \ + Win11SDK_10.0.22621 + +ifdef MULTI_ARCH +MSVC_PACKAGE_LIST += \ + Microsoft.VisualStudio.Component.VC.14.38.17.8.ARM64 \ + Microsoft.VisualStudio.Component.VC.14.38.17.8.MFC.ARM64 +endif + +MSVC_MANIFEST_FILE := $(wildcard msvc-cache/*.*.*.manifest) + +.SECONDARY: msvc-wine/vsdownload.py +msvc-wine/vsdownload.py: + git clone https://github.com/mstorsjo/msvc-wine.git + test -x $@ + +winsysroot.stamp: msvc-wine/vsdownload.py + $(GH_ECHO) '::group::Download and extract MSVC files' + rm -rf $@ winsysroot + msiextract --version + + $< \ + $(MSVC_ACCEPT_LICENSE) \ + --cache msvc-cache \ + --dest winsysroot \ + $(if $(MSVC_MANIFEST_FILE),--manifest $(MSVC_MANIFEST_FILE),--save-manifest) \ + $(MSVC_PACKAGE_LIST) + +ifeq "$(wildcard msvc-cache/MD5SUMS)" "" +# Generate hash sums for all downloaded files in the cache + (cd msvc-cache && find . -type f -printf '%P\n' \ + | LC_COLLATE=C sort \ + | xargs -d '\n' md5sum \ + ) > MD5SUMS.msvc-cache + mv MD5SUMS.msvc-cache msvc-cache/MD5SUMS +endif + +# Keep a copy of the manifest in the cache (if newly obtained) + $(if $(MSVC_MANIFEST_FILE),true,cp -pv *.*.*.manifest msvc-cache/) + + $(GH_ECHO) '::endgroup::' + $(GH_ECHO) '::group::Prepare MSVC installation' + +# Delete unpopulated MSVC tool dirs, as they will cause trouble if they +# are higher-versioned than the real one (see clang-cl's /vctoolsversion +# option, which we don't want to use) + for dir in winsysroot/VC/Tools/MSVC/*; do \ + test -d $$dir/include || rm -rv $$dir; \ + done + +ifndef MULTI_ARCH +# Delete directories specific to non-x64 architectures + find winsysroot -depth -type d \ + \( -iname x86 -o -iname Hostx86 -o \ + -iname arm -o -iname arm64 -o -iname HostArm64 \) \ + -printf "removing '%p'\\n" -exec rm -r {} + +endif # not MULTI_ARCH + +# Zap all the executables (except for CL+MIDL), since we won't need them + find winsysroot -type f -name \*.exe \ + \! -name cl.exe \! -name midl.exe \! 
-name midlc.exe \ + -printf "zapping '%p'\\n" -exec truncate -s 0 {} + + +# Generate environment files + ./gen-setenv.py winsysroot + +# Add symlinks for letter-case variations + ./case-fold.sh winsysroot + +ifndef MULTI_ARCH +# The Chromium build still checks for a handful of "x86" directories + for dir in \ + winsysroot/VC/Tools/MSVC/*/lib \ + winsysroot/VC/Tools/MSVC/*/atlmfc/lib \ + winsysroot/'Windows Kits'/*/Lib/*/um \ + winsysroot/'Windows Kits'/*/Lib/*/ucrt; \ + do \ + mkdir -v "$$dir/x86" || exit; \ + done +endif # not MULTI_ARCH + +# Save the manifest file, to allow repeating this download in the future + mkdir winsysroot/.vsdownload + cp -pv $(if $(MSVC_MANIFEST_FILE),$(MSVC_MANIFEST_FILE),*.*.*.manifest) \ + winsysroot/.vsdownload/ + xz -9 winsysroot/.vsdownload/*.manifest + +# Save the download cache hash sums, to verify future downloads + cp -p msvc-cache/MD5SUMS \ + winsysroot/.vsdownload/MD5SUMS.cache + + touch $@ + $(GH_ECHO) '::endgroup::' + +#### Miscellaneous + +clean: + rm -f LLVM-*.tar.xz + rm -f MD5SUMS.* + rm -f rc.cc + rm -f rustup-init + rm -rf $(CONTEXT) + rm -rf llvm llvm-tmp + rm -rf rust.stamp rust rust-cargo.tmp + rm -rf winsysroot.stamp winsysroot + -docker image rm $(IMAGE_NAME) + +clean-more: + rm -f *.manifest + rm -rf msvc-cache + +.PHONY: run run-extra build-image build-image-base verify-image \ + clean clean-more + +# end Makefile diff --git a/cross-build/README.md b/cross-build/README.md new file mode 100644 index 00000000..a4a0f129 --- /dev/null +++ b/cross-build/README.md @@ -0,0 +1,84 @@ +# Cross-compiling ungoogled-chromium for Windows + +This directory contains tooling to build ungoogled-chromium for Microsoft Windows using a containerized GNU/Linux build environment. This approach has numerous advantages over the typical Windows build process: + +* No Windows installation is needed; + +* The necessary Microsoft tools and libraries are freely downloadable (to be clear: no payment-encumbered software components are required); + +* The build is hermetic (i.e. isolated from the particulars of the host system on which it runs); + +* The build is reproducible (i.e. given the same input sources and libraries, the outputs will likewise be the same, regardless of who runs the build or when it is run); + +* Builds can target Windows on x64, x86, or ARM64 with equal ease. + + +## Requirements + +* Linux x86-64 (x64) environment (Ubuntu 24.04/noble is known to work) + +* Docker or Docker-compatible container host + +(Note: It may be possible to run through this process on Windows using [WSL](https://learn.microsoft.com/windows/wsl) and/or [Rancher Desktop](https://rancherdesktop.io/), but we have no information on this as yet.) + + +## Preparation + +Perform a Git clone of [this repository](https://github.com/ungoogled-chromium/ungoogled-chromium-windows), including submodules. Enter the `cross-build/` subdirectory. Many of the steps involved are covered by targets in `Makefile`. + + +### Building the container image + +The container for building ungoogled-chromium includes the Microsoft Windows SDK, which is proprietary software, and cannot be legally redistributed. You will thereby need to build the container image yourself. + +The easiest way is to run `make build-image`, which will prompt you to accept the Microsoft license, download the Windows components needed, and build the `chromium-win-cross` container image. + +By default, the image will support 64-bit (x64) builds only. 
To build a larger image that can support x86 (32-bit) and ARM64 builds as well, run `make build-image MULTI_ARCH=1`. + +If you wish to use the exact same version of the Windows SDK as an existing image, then you will need a copy of the `.manifest` file that was used to build said image. Place this file, uncompressed, in the `cross-build/` directory prior to building the image. It should have a name like `17.11.3.manifest`. + +The files downloaded from Microsoft will be stored in a `msvc-cache` subdirectory. If you wish to build the image again at a later time, then hanging on to this directory will save you the need to download over a gigabyte's worth of files again. + +Once the image build is complete, there will be a `MD5SUMS.rootfs` file that contains MD5 hash sums for every file in the image. You can use this file to compare your build of the image against someone else's. (Also see the `verify-image` target in the makefile.) + + +#### Base image (optional) + +Note that there is also a `chromium-win-cross-base` container image. This has everything that isn't from Microsoft, is freely redistributable, and is normally used as the basis for building `chromium-win-cross`. You can build the base image yourself if you like, with `make build-image-base`. + +The container has a regular (non-root) user, named `build`, for running the browser build. By default, its user ID is 1024. If you wish to use a different UID, specify it as e.g. `make build-image-base BUILD_UID=1234`. + +The base image is built on an official Ubuntu image, and the image build needs to download numerous Ubuntu packages. To keep the load down on the main Ubuntu package servers, the build uses a third-party mirror server. If there is a different mirror host that you would like to use, you can specify it with e.g. `make build-image-base APT_MIRROR=de.archive.ubuntu.com`. (If you wish to use the official servers, specify `APT_MIRROR=NONE`. Either way, the package indexes will be signature-verified.) + +The base image build accepts the same `MULTI_ARCH=1` parameter as described above. You will need to specify it if you wish to peform x86 or ARM64 builds. + + +### Building ungoogled-chromium + +You can start the container with `make run`. Additional shells in the same environment can be started with `make run-extra`. Note that when the first shell exits, the container and all its contents will be deleted! Please don't keep your only copy of any important work inside the container. (Of course, if you already have your own container workflow, then you make the rules.) + +You'll need a copy of this repository inside the container, be it a volume-mounted instance of your initial Git clone, or a new/separate one. Ensure that the Git submodule under `ungoogled-chromium/` is checked out as well; you should see e.g. `ungoogled-chromium/chromium_version.txt`. + +Enter the `cross-build/` subdirectory, and run `./build.sh --idle --tarball`. This will download the Chromium source tarball, unpack it, prune binary files, apply the ungoogled-chromium patches, and build the browser as a whole. Note that this script has other options; run `./build.sh --help` to see them. 
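+
+As a rough sketch, a complete session might look like the following (the clone
+URL is the one given in the Preparation section, and the options shown are only
+one reasonable combination):
+```
+# On the host
+make run
+
+# Inside the container
+git clone --recurse-submodules https://github.com/ungoogled-chromium/ungoogled-chromium-windows
+cd ungoogled-chromium-windows/cross-build
+./build.sh --idle --ccache
+```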
+ +If the build is successful, then you will see two final output files named like the following: +``` +ungoogled-chromium_123.0.1234.123-1.1_installer_x64.exe +ungoogled-chromium_123.0.1234.123-1.1_windows_x64.zip +``` +Copy these files out of the container (see the `docker cp` command for one way of doing this), and they should work as expected on a compatible Microsoft Windows system. + + +## Notes + +* Google's [re-implementation](https://github.com/nico/hack/blob/main/res/rc.cc) of the `rc` resource compiler is installed under `/usr/local/bin/`. You'll find the source code in `/usr/local/src/`. + +* All the Microsoft SDK stuff is under `/opt/microsoft/`, and the Chromium-relevant environment variables pointing to it are set accordingly. + +* A non-distro-provided Rust toolchain is installed under `/opt/rust/`. (The distro's packaged Rust compiler does not work, unfortunately, due to Rust's unforgivingly strict ABI compatibility.) + +* The build requires Microsoft's `midl.exe` compiler, and this in turn depends on `cl.exe`. I am not aware of any viable alternatives for these. The image includes an installation of Wine to allow running them. + +* The scripts are reasonably commented to explain what's going on, so please feel free to read through them beforehand. + +* Please report any issues to the project's issue tracker [here](https://github.com/ungoogled-software/ungoogled-chromium-windows/issues). diff --git a/cross-build/base-setup.sh b/cross-build/base-setup.sh new file mode 100644 index 00000000..f420e149 --- /dev/null +++ b/cross-build/base-setup.sh @@ -0,0 +1,172 @@ +# base-setup.sh +# +# Setup script for container image build, run from Dockerfile.base +# + +set -e +export DEBIAN_FRONTEND=noninteractive + +test -n "$APT_MIRROR" +test -n "$BUILD_UID" +test -f /usr/local/src/rc.cc + +run() { + echo "+ $*" + env "$@" 2>&1 + echo +} + +run tee /etc/apt/apt.conf.d/95custom << END +# Don't install recommended packages +APT::Install-Recommends "0"; + +# Don't use "Reading database ... X%" progress indicator +Dpkg::Use-Pty "false"; +END + +# Set up APT package repositories +if [ "_$APT_MIRROR" != _NONE ] +then + run perl -pi \ + -e 's!deb.debian.org!!;' \ + -e 's!archive.ubuntu.com/ubuntu!/ubuntu!;' \ + -e 's!security.ubuntu.com/ubuntu!/ubuntu-security! 
if 0;' \ + -e "s!!$APT_MIRROR!;" \ + -e '/ \w+-(backports|security) / and s/^/#!#/' \ + /etc/apt/sources.list.d/*.sources \ + /etc/apt/sources.list +fi +run apt-get --error-on=any update + +# General build environment tooling +run apt-get -y install \ + 7zip \ + bubblewrap \ + ccache \ + file \ + git \ + less \ + nano \ + netcat-openbsd \ + procps \ + python3 \ + python3-httplib2 \ + quilt \ + rsync \ + time \ + unzip \ + wget \ + wine wine64 \ + xz-utils \ + zip \ + zstd + +# NOTE: LLVM is installed from an upstream binary tarball, Rust is +# installed via rustup, and generate-ninja is built from source, so +# don't install packages for those + +# Needed by Linux-side tooling +run apt-get -y install \ + libexpat1-dev \ + libglib2.0-dev \ + libkrb5-dev \ + libnss3-dev + +# XXX EXPERIMENTAL: +# Additional dependencies for use_v8_context_snapshot=true +run apt-get -y install \ + libgl-dev \ + libpci-dev \ + libx11-xcb-dev \ + libxext-dev \ + libxi-dev + +# Build tools +run apt-get -y install \ + gperf \ + ninja-build \ + nodejs + +# Runtime dependencies of the upstream LLVM binaries +run apt-get -y install \ + libncurses6 \ + libxml2 + +run apt-get -y install libgcc-14-dev + +if [ -n "$MULTI_ARCH" ] +then + # ARM64 cross libraries are not needed, only x86 + run apt-get -y install \ + lib32gcc-s1 \ + libc6-dev-i386-cross \ + libgcc-14-dev-i386-cross + + # Help Clang find the x86 headers + run ln -s ../i686-linux-gnu/include /usr/include/i386-linux-gnu +fi + +# Set up sudo(8) to allow running gh-unburden as root +run apt-get -y install sudo +# +# Note: The "!fqdn" bit is to avoid "sudo: unable to resolve host _____: +# Name or service not known" errors in the container +# +run tee /etc/sudoers.d/build << END +Defaults !fqdn +build ALL = NOPASSWD: /usr/local/sbin/gh-unburden "" +END + +# Clean up +run apt-get clean +rm -f /var/lib/apt/lists/*ubuntu* + +# Set up duplicate LLVM tree with ccache(1) support +(cd /opt/llvm/bin + for x in clang clang++ clang-cl clang-cpp; do test -L $x || exit; done + test ! -L clang-[1-9]* +) +mkdir /opt/llvm-ccache +(cd /opt/llvm-ccache && ln -s ../llvm/* .) +rm /opt/llvm-ccache/bin +mkdir /opt/llvm-ccache/bin +(cd /opt/llvm-ccache/bin + find ../../llvm/bin/* -type l -exec cp -d {} . \; -o -exec ln -s {} . \; + ln -sf ../../../usr/bin/ccache clang-[1-9]* +) + +# Compile the rc program +run /opt/llvm/bin/clang++ \ + -stdlib=libc++ \ + -std=c++14 \ + -fuse-ld=lld \ + -Wall \ + -Wno-c++11-narrowing \ + /usr/local/src/rc.cc \ + -o /usr/local/bin/rc + +# Create regular user for running the build +run useradd \ + --uid $BUILD_UID \ + --gid users \ + --no-user-group \ + --comment 'Build User' \ + --create-home \ + --key HOME_MODE=0755 \ + --shell /bin/bash \ + build + +# Zap unreproducible files +rm -f /var/cache/ldconfig/aux-cache +for file in \ + alternatives.log \ + apt/history.log \ + apt/term.log \ + dpkg.log +do + echo UNREPRODUCIBLE_FILE > /var/log/$file +done + +echo 'base-setup done.' + +# end base-setup.sh diff --git a/cross-build/build.sh b/cross-build/build.sh new file mode 100755 index 00000000..5ba8dd57 --- /dev/null +++ b/cross-build/build.sh @@ -0,0 +1,439 @@ +#!/bin/sh +# build.sh +# +# ungoogled-chromium cross-build script: GNU/Linux to Microsoft Windows +# +# This script should be run inside the build container environment. 
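+#
+# Typical invocations (a sketch only; see print_usage() below for the
+# complete option list):
+#
+#   ./build.sh                        # x64 release build from the source tarball
+#   ./build.sh --x86 --debug          # 32-bit debug build
+#   ./build.sh --git --ccache --idle  # build a Git checkout with ccache, niced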
+# + +# Defaults +# +git=no +target_cpu= # default is x64 +ccache= +debug=no +idle=no +ci=no + +print_usage() +{ + cat << END +usage: $0 [--git] [--arm64|--x86] [--ccache] [--idle] [--ci] + +options: + --git build from a Git checkout instead of a source tarball + --arm64 build for 64-bit ARM instead of Intel + --x86 build for x86 (32-bit) instead of x64 + --ccache use ccache for faster rebuilds + --debug build with debugging enabled + --idle reduce CPU priority for build process + --ci running in continuous-integration job (experts only) + --help print this help +END + exit 1 +} + +# +# Parse command-line options +# + +while [ -n "$1" ] +do + case "$1" in + --git) git=yes ;; + --arm64) target_cpu=arm64 ;; + --x64) target_cpu= ;; + --x86) target_cpu=x86 ;; + --ccache) ccache=-ccache ;; + --debug) debug=yes ;; + --idle) idle=yes ;; + --ci) ci=yes ;; + -h|--help) print_usage ;; + -*) echo "$0: error: unrecognized option \"$1\""; exit 1 ;; + *) echo "$0: error: unrecognized argument \"$1\""; exit 1 ;; + esac + shift +done + +wrap= + +run() +{ + local t= w= + + if [ "_$1" = _--time ] + then + t='time --verbose' + shift + fi + if [ "_$1" = _--wrap ] + then + w="$wrap" + shift + fi + + echo "+ $*" + $t $w env "$@" + echo ' ' +} + +set -e + +u_c=$(cd ../ungoogled-chromium && pwd) +u_c_w=$(cd .. && pwd) + +# +# Sanity checks +# + +if [ ! -f $u_c/chromium_version.txt ] +then + echo "$0: error: ungoogled-chromium Git tree is not present at $u_c/" + echo "(Did you clone with --recurse-submodules ?)" + exit 1 + +fi + +if [ ! -d /opt/rust/sysroot ] +then + echo "$0: error: Rust installation is not present" + echo "(Please run this script inside the build container)" + exit 1 +fi + +if [ "_$target_cpu" = _arm64 -a \ + ! -d /opt/rust/sysroot/lib/rustlib/aarch64-pc-windows-msvc/lib ] +then + echo "$0: error: Rust installation lacks ARM64 support" + echo "(Please build the container with MULTI_ARCH=1)" + exit 1 +fi + +if [ "_$target_cpu" = _x86 -a \ + ! -d /opt/rust/sysroot/lib/rustlib/i686-pc-windows-msvc/lib ] +then + echo "$0: error: Rust installation lacks x86 (32-bit) support" + echo "(Please build the container with MULTI_ARCH=1)" + exit 1 +fi + +if [ ! -d /opt/microsoft/VC ] +then + echo "$0: error: Windows SDK is not present" + echo "(Please run this script inside the build container)" + exit 1 +fi + +if [ "_$target_cpu" = _arm64 -a \ + ! -f /opt/microsoft/VC/Tools/MSVC/*/bin/HostX64/arm64/cl.exe ] +then + echo "$0: error: Windows SDK lacks ARM64 support" + echo "(Please build the container with MULTI_ARCH=1)" + exit 1 +fi + +if [ "_$target_cpu" = _x86 -a \ + ! -f /opt/microsoft/VC/Tools/MSVC/*/bin/Hostx86/x86/cl.exe ] +then + echo "$0: error: Windows SDK lacks x86 (32-bit) support" + echo "(Please build the container with MULTI_ARCH=1)" + exit 1 +fi + +if [ ! -x /usr/local/bin/rc ] +then + echo "$0: error: Google RC is not present" + echo "(Please run this script inside the build container)" + exit 1 +fi + +# +# Collect some information +# + +chromium_version=$(cat $u_c/chromium_version.txt) +u_c_commit=$(cd $u_c && git log -1 --format='%h') +u_c_w_commit=$(git log -1 --format='%h') + +cat << END +---------------------------------------------- +Chromium upstream version: $chromium_version +ungoogled-chromium Git commit: $u_c_commit +ungoogled-chromium-windows Git commit: $u_c_w_commit +---------------------------------------------- +END +echo ' ' + +# +# Download and unpack sources +# + +mkdir -p build/download_cache +cd build + +hide_progress=$(test -t 1 || echo --hide-progress-bar) + +if [ ! 
-f stamp-download ] +then + if [ $git = yes ] + then + pgo=$(test $x86 = yes && echo win32 || echo win64) + run $u_c/utils/clone.py --output src --pgo $pgo + else + dl_args="--ini $u_c/downloads.ini --cache download_cache" + + if [ $ci = no ] + then + run $u_c/utils/downloads.py retrieve $dl_args $hide_progress + fi + + run $u_c/utils/downloads.py unpack $dl_args src + fi + touch stamp-download +fi + +if [ ! -f src/BUILD.gn ] +then + echo "$0: error: no Chromium source tree is present" + exit 1 +fi + +if [ ! -f stamp-download-more ] +then + dl_args="--ini ../../downloads.ini --cache download_cache" + + run $u_c/utils/downloads.py retrieve --components directx-headers $dl_args $hide_progress + + run $u_c/utils/downloads.py unpack --components directx-headers $dl_args src + + touch stamp-download-more +fi + +# +# Prune binaries +# + +if [ ! -f stamp-prune ] +then + run $u_c/utils/prune_binaries.py src $u_c/pruning.list + touch stamp-prune +fi + +# +# Apply patches +# + +if [ ! -d patches ] +then + # Create combined patches directory for quilt(1) + # + mkdir patches + for dir in $u_c/patches/* $u_c_w/patches/* + do + test -d "$dir" || continue + ln -s $dir patches + done + + (cat $u_c/patches/series + echo + cat $u_c_w/patches/series + ) > patches/series +fi + +if [ ! -f stamp-patch ] +then + (cd src && run QUILT_PATCHES=../patches quilt push -aq --fuzz=0) + touch stamp-patch +fi + +# +# Substitute domains +# + +if [ ! -f stamp-substitute ] +then + run $u_c/utils/domain_substitution.py \ + apply \ + --regex $u_c/domain_regex.list \ + --files $u_c/domain_substitution.list \ + src + touch stamp-substitute +fi + +# +# Initialize default Wine prefix +# + +if [ ! -d ~/.wine ] +then + run WINEDEBUG=-all wineboot --init + + # Use /tmp for Windows temp files + (cd ~/.wine/drive_c/users/$(whoami) && rmdir Temp && ln -s /tmp Temp) +fi + +# +# Define execution wrappers for the build +# + +if bwrap --bind / / true 2>/dev/null +then + # Use bubblewrap for sandboxing: + # * Allow write access only to the source/build tree and /tmp + # * Use separate /dev (allows writing to e.g. /dev/null) + # * Disallow network access + wrap="$wrap bwrap --ro-bind / / --dev /dev --bind /tmp /tmp --bind $PWD/src $PWD/src --unshare-net" +fi + +if [ $idle = yes ] +then + wrap="$wrap ionice -c 3 chrt -i 0 nice -n 19" +fi + +cd src + +out_dir=out/Default${target_cpu:+-$target_cpu} + +# +# Build GN +# + +mkdir -p $out_dir + +if [ ! -x out/gn ] +then + run --wrap \ + CXX=/opt/llvm/bin/clang++ \ + CXXFLAGS='-stdlib=libc++' \ + AR=/opt/llvm/bin/llvm-ar \ + LDFLAGS='-fuse-ld=lld -stdlib=libc++ -l:libc++abi.a' \ + tools/gn/bootstrap/bootstrap.py \ + --build-path=out \ + --skip-generate-buildfiles +fi +if [ ! 
-x $out_dir/gn ] +then + ln -s ../gn $out_dir +fi + +# +# Prepare build configuration +# + +flags=$out_dir/args.gn + +(cat $u_c/flags.gn + echo + cat $u_c_w/flags.windows.gn + echo +) > $flags.new + +if [ -n "$target_cpu" ] +then + sed -i "/^target_cpu=/s/x64/$target_cpu/" $flags.new +fi +if [ $debug = yes ] +then + sed -i \ + -e '/^is_debug=/s/false/true/' \ + -e '/^is_official_build=/s/true/false/' \ + -e '/^\w*symbol_level=/s/0/1/' \ + $flags.new +fi + +clang_dir=/opt/llvm$ccache + +clang_ver=$(/opt/llvm/bin/clang++ --version \ + | sed -nr 's/.*clang version ([0-9]+)\..+/\1/p') + +cat >> $flags.new << END +clang_base_path="$clang_dir" +clang_version="$clang_ver" +rust_sysroot_absolute="/opt/rust/sysroot" +rust_bindgen_root="/opt/rust/bindgen" +rustc_version="$(/opt/rust/sysroot/bin/rustc --version)" +# XXX temporary +use_v8_context_snapshot=true +ozone_platform_wayland=false +ozone_platform_x11=false +rtc_use_x11_extensions=false +use_alsa=false +use_cups=false +use_dbus=false +use_pangocairo=false +use_pulseaudio=false +use_udev=false +use_xkbcommon=false +END + +if [ -f ../../extra-flags.gn ] +then + (echo; cat ../../extra-flags.gn) >> $flags.new +fi + +if [ -f $out_dir/build.ninja ] && cmp -s $flags.new $flags +then + rm $flags.new + run_gn=no +else + echo "======== begin GN flags ========" + sed 's/^$/ /' $flags.new + echo "========= end GN flags =========" + echo ' ' + mv -f $flags.new $flags + run_gn=yes +fi + +# +# Generate build files, and perform the build +# + +if [ $run_gn = yes ] +then + run --wrap out/gn gen $out_dir --fail-on-unused-args +fi + +targets='chrome chromedriver mini_installer' + +if [ $ci = yes ] +then + echo $targets > $out_dir/build.targets + exit 0 +fi + +run --time --wrap ${NINJA:-ninja} -C $out_dir $targets + +# +# Package up the build +# + +(cd ../.. + run python3 ../package.py --build-outputs build/src/$out_dir + ls -l build/ungoogled-chromium_* + echo ' ' + sha256sum build/ungoogled-chromium_* + echo ' ' +) + +# +# Hash sums to troubleshoot reproducibility issues +# + +echo 'Generating hash sums of source and build trees ...' + +(echo '# ungoogled-chromium source and build tree MD5 hash sums' + echo '# (sorted by last-modified timestamp, oldest to newest)' + echo "# Chromium upstream version: $chromium_version" + echo "# ungoogled-chromium Git commit: $u_c_commit" + echo "# ungoogled-chromium-windows Git commit: $u_c_w_commit" + test -z "$target_cpu" || echo "# Target CPU: $target_cpu" + find . -type f -printf '%T@\t%P\n' \ + | LC_COLLATE=C sort -k 1,1g -k 2b \ + | cut -f2- \ + | xargs -d '\n' md5sum +) > ../MD5SUMS${target_cpu:+-$target_cpu}.repro + +echo ' ' +echo 'Build complete.' + +# end build.sh diff --git a/cross-build/case-fold.sh b/cross-build/case-fold.sh new file mode 100755 index 00000000..02bad0e9 --- /dev/null +++ b/cross-build/case-fold.sh @@ -0,0 +1,317 @@ +#!/bin/sh +# case-fold.sh + +set -e + +# Create specific mixed-case symlinks for lowercase-named files in a given +# directory, e.g. +# +# WinString.h -> winstring.h +# +mixed_case_files() +{ + local dir=$1 + shift + (cd "$dir" || exit + for x in $* + do + lc_x=$(echo "$x" | tr A-Z a-z) + test -f $lc_x || { echo "error: $dir/$lc_x: not found"; exit 1; } + test -f $x || ln -sv $lc_x $x + done + ) +} + +# Create lowercase symlinks for most (all?) mixed-case header and library +# files in the specified directory, e.g. 
+# +# winsock2.h -> WinSock2.h +# advapi32.lib -> AdvAPI32.Lib +# +case_fold_dir() +{ + local dir=$1 + (cd "$dir" || exit + for x in [A-Z]*.h *.[Ll][Ii][Bb] + do + test -f $x || continue + lc_x=$(echo "$x" | tr A-Z a-z) + test "_$x" != "_$lc_x" || continue + test -f $lc_x || ln -sv $x $lc_x + done + ) +} + +# Optional base directory +# +test -z "$1" || cd "$1" + +(cd VC/Tools/MSVC/*/bin && (test -d HostX64 || ln -sv Hostx64 HostX64)) + +case_fold_dir Windows?Kits/10/Include/*/shared +case_fold_dir Windows?Kits/10/Include/*/um +case_fold_dir Windows?Kits/10/Include/*/winrt + +mixed_case_files VC/Tools/MSVC/*/include \ + DelayIMP.h + +mixed_case_files Windows?Kits/10/Include/*/shared \ + DXGI1_4.h \ + DXGIType.h \ + DriverSpecs.h \ + POPPACK.H \ + PSHPACK1.H \ + Sddl.h \ + SpecStrings.h \ + WinDef.h \ + WlanTypes.h + +mixed_case_files Windows?Kits/10/Include/*/um \ + AudioClient.h \ + Combaseapi.h \ + D2DBaseTypes.h \ + D3Dcommon.h \ + DWrite.h \ + DWrite_1.h \ + DWrite_2.h \ + EapTypes.h \ + FontSub.h \ + Functiondiscoverykeys_devpkey.h \ + GdiplusBase.h \ + GdiplusBitmap.h \ + GdiplusBrush.h \ + GdiplusCachedBitmap.h \ + GdiplusColor.h \ + GdiplusColorMatrix.h \ + GdiplusEffects.h \ + GdiplusEnums.h \ + GdiplusFlat.h \ + GdiplusFont.h \ + GdiplusFontCollection.h \ + GdiplusFontFamily.h \ + GdiplusGpStubs.h \ + GdiplusGraphics.h \ + GdiplusHeaders.h \ + GdiplusImageAttributes.h \ + GdiplusImageCodec.h \ + GdiplusImaging.h \ + GdiplusInit.h \ + GdiplusLineCaps.h \ + GdiplusMatrix.h \ + GdiplusMem.h \ + GdiplusMetaHeader.h \ + GdiplusMetafile.h \ + GdiplusPath.h \ + GdiplusPen.h \ + GdiplusPixelFormats.h \ + GdiplusRegion.h \ + GdiplusStringFormat.h \ + GdiplusTypes.h \ + MFTransform.h \ + MMDeviceAPI.h \ + NCrypt.h \ + OAIdl.h \ + OCIdl.h \ + ObjBase.h \ + ObjIdl.h \ + Ole2.h \ + OleCtl.h \ + PowrProf.h \ + SPError.h \ + Sensors.h \ + Shlobj.h \ + T2EmbApi.h \ + VFWMSGS.H \ + VSStyle.h \ + Vssym32.h \ + Winsock2.h \ + Winuser.h \ + Ws2spi.h \ + XInput.h \ + XpsObjectModel.h \ + restrictedErrorInfo.h \ + windows.graphics.directX.direct3d11.interop.h + +mixed_case_files Windows?Kits/10/Include/*/winrt \ + AsyncInfo.h \ + IVectorChangedEventArgs.h \ + Inspectable.h \ + WinString.h \ + Windows.ApplicationModel.h \ + Windows.ApplicationModel.Activation.h \ + Windows.ApplicationModel.AppService.h \ + Windows.ApplicationModel.Appointments.h \ + Windows.ApplicationModel.Appointments.AppointmentsProvider.h \ + Windows.ApplicationModel.Background.h \ + Windows.ApplicationModel.Calls.h \ + Windows.ApplicationModel.Calls.Background.h \ + Windows.ApplicationModel.CommunicationBlocking.h \ + Windows.ApplicationModel.Contacts.h \ + Windows.ApplicationModel.Contacts.Provider.h \ + Windows.ApplicationModel.Core.h \ + Windows.ApplicationModel.DataTransfer.h \ + Windows.ApplicationModel.DataTransfer.DragDrop.h \ + Windows.ApplicationModel.DataTransfer.ShareTarget.h \ + Windows.ApplicationModel.Email.h \ + Windows.ApplicationModel.Payments.h \ + Windows.ApplicationModel.Search.h \ + Windows.ApplicationModel.SocialInfo.h \ + Windows.ApplicationModel.UserActivities.h \ + Windows.ApplicationModel.UserDataAccounts.h \ + Windows.ApplicationModel.UserDataAccounts.Provider.h \ + Windows.ApplicationModel.UserDataTasks.h \ + Windows.ApplicationModel.Wallet.h \ + Windows.Data.Json.h \ + Windows.Data.Text.h \ + Windows.Data.Xml.Dom.h \ + Windows.Devices.h \ + Windows.Devices.Adc.Provider.h \ + Windows.Devices.Bluetooth.h \ + Windows.Devices.Bluetooth.Advertisement.h \ + Windows.Devices.Bluetooth.Background.h \ + 
Windows.Devices.Bluetooth.GenericAttributeProfile.h \ + Windows.Devices.Bluetooth.Rfcomm.h \ + Windows.Devices.Display.h \ + Windows.Devices.Enumeration.h \ + Windows.Devices.Geolocation.h \ + Windows.Devices.Gpio.Provider.h \ + Windows.Devices.Haptics.h \ + Windows.Devices.HumanInterfaceDevice.h \ + Windows.Devices.I2c.Provider.h \ + Windows.Devices.Input.h \ + Windows.Devices.Lights.h \ + Windows.Devices.Perception.h \ + Windows.Devices.PointOfService.h \ + Windows.Devices.Power.h \ + Windows.Devices.Printers.h \ + Windows.Devices.Printers.Extensions.h \ + Windows.Devices.Pwm.Provider.h \ + Windows.Devices.Radios.h \ + Windows.Devices.Sensors.h \ + Windows.Devices.SmartCards.h \ + Windows.Devices.Sms.h \ + Windows.Devices.Spi.Provider.h \ + Windows.Foundation.h \ + Windows.Foundation.Numerics.h \ + Windows.Gaming.Input.h \ + Windows.Gaming.Input.Custom.h \ + Windows.Gaming.Input.ForceFeedback.h \ + Windows.Gaming.Preview.h \ + Windows.Gaming.XboxLive.h \ + Windows.Globalization.h \ + Windows.Graphics.h \ + Windows.Graphics.DirectX.Direct3D11.h \ + Windows.Graphics.DirectX.h \ + Windows.Graphics.Display.h \ + Windows.Graphics.Effects.h \ + Windows.Graphics.Imaging.h \ + Windows.Graphics.Printing.h \ + Windows.Graphics.Printing.PrintTicket.h \ + Windows.Management.Deployment.h \ + Windows.Media.h \ + Windows.Media.Audio.h \ + Windows.Media.Capture.h \ + Windows.Media.Capture.Core.h \ + Windows.Media.Capture.Frames.h \ + Windows.Media.Casting.h \ + Windows.Media.ClosedCaptioning.h \ + Windows.Media.Core.h \ + Windows.Media.Devices.h \ + Windows.Media.Devices.Core.h \ + Windows.Media.Editing.h \ + Windows.Media.Effects.h \ + Windows.Media.FaceAnalysis.h \ + Windows.Media.MediaProperties.h \ + Windows.Media.PlayTo.h \ + Windows.Media.Playback.h \ + Windows.Media.Protection.h \ + Windows.Media.Render.h \ + Windows.Media.SpeechRecognition.h \ + Windows.Media.Streaming.h \ + Windows.Media.Streaming.Adaptive.h \ + Windows.Media.Transcoding.h \ + Windows.Networking.h \ + Windows.Networking.BackgroundTransfer.h \ + Windows.Networking.Connectivity.h \ + Windows.Networking.Sockets.h \ + Windows.Perception.h \ + Windows.Perception.People.h \ + Windows.Perception.Spatial.h \ + Windows.Phone.h \ + Windows.Security.Authentication.Web.h \ + Windows.Security.Authentication.Web.Core.h \ + Windows.Security.Authentication.Web.Provider.h \ + Windows.Security.Authorization.AppCapabilityAccess.h \ + Windows.Security.Credentials.h \ + Windows.Security.Cryptography.Certificates.h \ + Windows.Security.Cryptography.Core.h \ + Windows.Security.EnterpriseData.h \ + Windows.Services.Maps.h \ + Windows.Services.Maps.LocalSearch.h \ + Windows.Storage.h \ + Windows.Storage.FileProperties.h \ + Windows.Storage.Pickers.Provider.h \ + Windows.Storage.Provider.h \ + Windows.Storage.Search.h \ + Windows.Storage.Streams.h \ + Windows.System.h \ + Windows.System.Diagnostics.h \ + Windows.System.Power.h \ + Windows.System.RemoteSystems.h \ + Windows.System.Threading.h \ + Windows.UI.h \ + Windows.UI.Composition.h \ + Windows.UI.Core.h \ + Windows.UI.Core.CoreWindowFactory.h \ + Windows.UI.Input.h \ + Windows.UI.Input.Inking.h \ + Windows.UI.Input.Spatial.h \ + Windows.UI.Notifications.h \ + Windows.UI.Popups.h \ + Windows.UI.Shell.h \ + Windows.UI.StartScreen.h \ + Windows.UI.Text.h \ + Windows.UI.Text.Core.h \ + Windows.UI.UIAutomation.h \ + Windows.UI.ViewManagement.h \ + Windows.UI.WindowManagement.h \ + Windows.UI.Xaml.h \ + Windows.UI.Xaml.Automation.h \ + Windows.UI.Xaml.Automation.Peers.h \ + 
Windows.UI.Xaml.Automation.Provider.h \ + Windows.UI.Xaml.Automation.Text.h \ + Windows.UI.Xaml.Controls.h \ + Windows.UI.Xaml.Controls.Primitives.h \ + Windows.UI.Xaml.Data.h \ + Windows.UI.Xaml.Documents.h \ + Windows.UI.Xaml.Input.h \ + Windows.UI.Xaml.Interop.h \ + Windows.UI.Xaml.Media.h \ + Windows.UI.Xaml.Media.Animation.h \ + Windows.UI.Xaml.Media.Imaging.h \ + Windows.UI.Xaml.Media.Media3D.h \ + Windows.UI.Xaml.Navigation.h \ + Windows.Web.h \ + Windows.Web.Http.h \ + Windows.Web.Http.Filters.h \ + Windows.Web.Http.Headers.h \ + Windows.Web.Syndication.h \ + Windows.Web.UI.h \ + windows.graphics.directX.direct3d11.h + +for arch in arm arm64 x64 x86 +do + dir=$(echo Windows?Kits/10/Lib/*/um/$arch) + test -d "$dir" || continue + + case_fold_dir "$dir" + + mixed_case_files "$dir" \ + Bthprops.lib \ + Cfgmgr32.lib \ + Crypt32.lib \ + Propsys.lib \ + Setupapi.lib +done + +# end case-fold.sh diff --git a/cross-build/ccache.conf b/cross-build/ccache.conf new file mode 100644 index 00000000..29891e49 --- /dev/null +++ b/cross-build/ccache.conf @@ -0,0 +1,9 @@ +# /etc/ccache.conf + +compiler_check = %compiler% -v +inode_cache = false +max_size = 10G +path = /opt/llvm/bin:/usr/bin +sloppiness = include_file_ctime, include_file_mtime, locale, time_macros + +# EOF diff --git a/cross-build/gen-setenv.py b/cross-build/gen-setenv.py new file mode 100755 index 00000000..deef6338 --- /dev/null +++ b/cross-build/gen-setenv.py @@ -0,0 +1,168 @@ +#!/usr/bin/env python3 +# +# This script generates JSON files that are easier-to-parse equivalents of +# the SetEnv batch script provided by Visual Studio, which sets necessary +# environment variables. These JSON files are used by the Chromium build +# system to obtain specific paths to Microsoft SDK resources. +# +# The GenerateSetEnvCmd() function is lightly modified from +# https://chromium.googlesource.com/chromium/tools/depot_tools.git/+/5eef793337de052b29dc5ec6eb22676938dadbe4/win_toolchain/package_from_installed.py +# (note: not a direct download link) +# + +import collections +import glob +import json +import os +import sys + +dir = sys.argv[1] + +_vc_tools = glob.glob('VC/Tools/MSVC/*', root_dir=dir)[0] +_win_version = glob.glob('[1-9]*', root_dir=f'{dir}/Windows Kits/10/Lib')[0] + +def GenerateSetEnvCmd(target_dir): + """Generate a batch file that gyp expects to exist to set up the compiler + environment. + + This is normally generated by a full install of the SDK, but we + do it here manually since we do not do a full install.""" + vc_tools_parts = _vc_tools.split('/') + + # All these paths are relative to the root of the toolchain package. + include_dirs = [ + ['Windows Kits', '10', 'Include', _win_version, 'um'], + ['Windows Kits', '10', 'Include', _win_version, 'shared'], + ['Windows Kits', '10', 'Include', _win_version, 'winrt'], + ] + include_dirs.append(['Windows Kits', '10', 'Include', _win_version, 'ucrt']) + include_dirs.extend([ + vc_tools_parts + ['include'], + vc_tools_parts + ['atlmfc', 'include'], + ]) + libpath_dirs = [ + vc_tools_parts + ['lib', 'x86', 'store', 'references'], + ['Windows Kits', '10', 'UnionMetadata', _win_version], + ] + # Common to x86, x64, and arm64 + env = collections.OrderedDict([ + # Yuck: These have a trailing \ character. No good way to represent this + # in an OS-independent way. + ('VSINSTALLDIR', [['.\\']]), + ('VCINSTALLDIR', [['VC\\']]), + ('INCLUDE', include_dirs), + ('LIBPATH', libpath_dirs), + ]) + # x86. Always use amd64_x86 cross, not x86 on x86. 
+ env['VCToolsInstallDir'] = [vc_tools_parts[:]] + # Yuck: This one ends in a path separator as well. + env['VCToolsInstallDir'][0][-1] += os.path.sep + env_x86 = collections.OrderedDict([ + ( + 'PATH', + [ + ['Windows Kits', '10', 'bin', _win_version, 'x64'], + vc_tools_parts + ['bin', 'HostX64', 'x86'], + vc_tools_parts + + ['bin', 'HostX64', 'x64'], # Needed for mspdb1x0.dll. + ]), + ('LIB', [ + vc_tools_parts + ['lib', 'x86'], + ['Windows Kits', '10', 'Lib', _win_version, 'um', 'x86'], + ['Windows Kits', '10', 'Lib', _win_version, 'ucrt', 'x86'], + vc_tools_parts + ['atlmfc', 'lib', 'x86'], + ]), + ]) + + # x64. + env_x64 = collections.OrderedDict([ + ('PATH', [ + ['Windows Kits', '10', 'bin', _win_version, 'x64'], + vc_tools_parts + ['bin', 'HostX64', 'x64'], + ]), + ('LIB', [ + vc_tools_parts + ['lib', 'x64'], + ['Windows Kits', '10', 'Lib', _win_version, 'um', 'x64'], + ['Windows Kits', '10', 'Lib', _win_version, 'ucrt', 'x64'], + vc_tools_parts + ['atlmfc', 'lib', 'x64'], + ]), + ]) + + # arm64. + env_arm64 = collections.OrderedDict([ + ('PATH', [ + ['Windows Kits', '10', 'bin', _win_version, 'x64'], + vc_tools_parts + ['bin', 'HostX64', 'arm64'], + vc_tools_parts + ['bin', 'HostX64', 'x64'], + ]), + ('LIB', [ + vc_tools_parts + ['lib', 'arm64'], + ['Windows Kits', '10', 'Lib', _win_version, 'um', 'arm64'], + ['Windows Kits', '10', 'Lib', _win_version, 'ucrt', 'arm64'], + vc_tools_parts + ['atlmfc', 'lib', 'arm64'], + ]), + ]) + + def BatDirs(dirs): + return ';'.join(['%cd%\\' + os.path.join(*d) for d in dirs]) + + set_env_prefix = os.path.join(target_dir, r'Windows Kits/10/bin/SetEnv') + with open(set_env_prefix + '.cmd', 'w') as f: + # The prologue changes the current directory to the root of the + # toolchain package, so that path entries can be set up without needing + # ..\..\..\ components. + f.write('@echo off\n' + ':: Generated by win_toolchain\\package_from_installed.py.\n' + 'pushd %~dp0..\\..\\..\n') + for var, dirs in env.items(): + f.write('set %s=%s\n' % (var, BatDirs(dirs))) + f.write('if "%1"=="/x64" goto x64\n') + f.write('if "%1"=="/arm64" goto arm64\n') + + for var, dirs in env_x86.items(): + f.write('set %s=%s%s\n' % + (var, BatDirs(dirs), ';%PATH%' if var == 'PATH' else '')) + f.write('goto :END\n') + + f.write(':x64\n') + for var, dirs in env_x64.items(): + f.write('set %s=%s%s\n' % + (var, BatDirs(dirs), ';%PATH%' if var == 'PATH' else '')) + f.write('goto :END\n') + + f.write(':arm64\n') + for var, dirs in env_arm64.items(): + f.write('set %s=%s%s\n' % + (var, BatDirs(dirs), ';%PATH%' if var == 'PATH' else '')) + f.write('goto :END\n') + f.write(':END\n') + # Restore the original directory. 
+ f.write('popd\n') + with open(set_env_prefix + '.x86.json', 'wt', newline='') as f: + assert not set(env.keys()) & set(env_x86.keys()), 'dupe keys' + json.dump( + { + 'env': + collections.OrderedDict( + list(env.items()) + list(env_x86.items())) + }, f, indent=2) + with open(set_env_prefix + '.x64.json', 'wt', newline='') as f: + assert not set(env.keys()) & set(env_x64.keys()), 'dupe keys' + json.dump( + { + 'env': + collections.OrderedDict( + list(env.items()) + list(env_x64.items())) + }, f, indent=2) + with open(set_env_prefix + '.arm64.json', 'wt', newline='') as f: + assert not set(env.keys()) & set(env_arm64.keys()), 'dupe keys' + json.dump( + { + 'env': + collections.OrderedDict( + list(env.items()) + list(env_arm64.items())) + }, f, indent=2) + +GenerateSetEnvCmd(dir) + +# EOF diff --git a/cross-build/gh-unburden.sh b/cross-build/gh-unburden.sh new file mode 100755 index 00000000..47658c88 --- /dev/null +++ b/cross-build/gh-unburden.sh @@ -0,0 +1,41 @@ +#!/bin/sh +# gh-unburden.sh +# +# Script to delete unnecessary files on the GitHub runner to make more +# working space available. Should be invoked in the top-level directory +# of the runner's root filesystem. +# + +set -e + +# Must run as root +test $(id -u) -eq 0 + +# Must run at the base of the runner's root filesystem +# (which may not be "/" if inside a container) +test -f etc/passwd + +echo Before: +df -m . + +# Note that /opt/hostedtoolcache/ is mounted elsewhere inside the +# container, so don't remove the directory, only its contents + +rm -rf \ + opt/hostedtoolcache/* \ + usr/lib/google-cloud-sdk \ + usr/lib/jvm \ + usr/local/.ghcup \ + usr/local/julia* \ + usr/local/lib/android \ + usr/local/lib/node_modules \ + usr/local/share/chromium \ + usr/local/share/powershell \ + usr/share/dotnet \ + usr/share/miniconda \ + usr/share/swift + +echo After: +df -m . + +# end gh-unburden.sh diff --git a/cross-build/rootfs-sums.sh b/cross-build/rootfs-sums.sh new file mode 100755 index 00000000..50af168a --- /dev/null +++ b/cross-build/rootfs-sums.sh @@ -0,0 +1,47 @@ +#!/bin/sh +# rootfs-sums.sh +# +# A script to facilitate generating/verifying MD5 hash sums for all files +# in a [container's] root filesystem +# + +set -e + +mnt=/tmp/rootfs + +do_bind_mount() +{ + mkdir $mnt + mount --bind --make-private / $mnt + cd $mnt +} + +if [ $(id -u) -ne 0 ] +then + echo "$0: error: must run as root" + exit 1 +fi + +case "$1" in + generate) + do_bind_mount + find . 
-type f -printf '%P\n' \ + | LC_COLLATE=C sort \ + | xargs -d '\n' md5sum + cd; umount $mnt + ;; + + verify) + do_bind_mount + md5sum --quiet --check && echo OK # read input from stdin + cd; umount $mnt + ;; + + *) + echo 'usage:' + echo " $0 generate > MD5SUMS" + echo " $0 verify < MD5SUMS" + ;; +esac + +# end rootfs-sums.sh diff --git a/flags.windows.gn b/flags.windows.gn index 98e748e8..8110878d 100644 --- a/flags.windows.gn +++ b/flags.windows.gn @@ -6,6 +6,7 @@ is_component_build=false is_debug=false is_official_build=true proprietary_codecs=true +target_os="win" target_cpu="x64" use_sysroot=false dcheck_always_on=false diff --git a/package.py b/package.py index 940244a6..baabe0f9 100644 --- a/package.py +++ b/package.py @@ -38,7 +38,7 @@ def _get_target_cpu(build_outputs): if not _cached_target_cpu: with open(build_outputs / 'args.gn', 'r') as f: args_gn_text = f.read() - for cpu in ('x64', 'x86'): + for cpu in ('arm', 'arm64', 'x64', 'x86'): if f'target_cpu="{cpu}"' in args_gn_text: _cached_target_cpu = cpu break @@ -49,6 +49,13 @@ def main(): """Entrypoint""" parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument( + '--build-outputs', + metavar='DIR', + type=Path, + default=Path('build/src/out/Default'), + help=('Top-level directory of build tree. ' + 'Default: %(default)s')) parser.add_argument( '--cpu-arch', metavar='ARCH', @@ -59,12 +66,10 @@ def main(): 'Default (from platform.architecture()): %(default)s')) args = parser.parse_args() - build_outputs = Path('build/src/out/Default') - - shutil.copyfile('build/src/out/Default/mini_installer.exe', + shutil.copyfile(args.build_outputs / 'mini_installer.exe', 'build/ungoogled-chromium_{}-{}.{}_installer_{}.exe'.format( get_chromium_version(), _get_release_revision(), - _get_packaging_revision(), _get_target_cpu(build_outputs))) + _get_packaging_revision(), _get_target_cpu(args.build_outputs))) timestamp = None try: @@ -75,7 +80,7 @@ def main(): output = Path('build/ungoogled-chromium_{}-{}.{}_windows_{}.zip'.format( get_chromium_version(), _get_release_revision(), - _get_packaging_revision(), _get_target_cpu(build_outputs))) + _get_packaging_revision(), _get_target_cpu(args.build_outputs))) excluded_files = set([ Path('mini_installer.exe'), @@ -85,9 +90,9 @@ def main(): ]) files_generator = filescfg.filescfg_generator( Path('build/src/chrome/tools/build/win/FILES.cfg'), - build_outputs, args.cpu_arch, excluded_files) + args.build_outputs, args.cpu_arch, excluded_files) filescfg.create_archive( - files_generator, tuple(), build_outputs, output, timestamp) + files_generator, tuple(), args.build_outputs, output, timestamp) if __name__ == '__main__': main() diff --git a/patches/series b/patches/series index ea0618d9..ac1ce70d 100644 --- a/patches/series +++ b/patches/series @@ -1,5 +1,7 @@ +ungoogled-chromium/windows/windows-use-system-tools.patch ungoogled-chromium/windows/windows-disable-reorder-fix-linking.patch ungoogled-chromium/windows/windows-disable-win-build-output.patch +ungoogled-chromium/windows/windows-run-midl-via-wine.patch ungoogled-chromium/windows/windows-disable-rcpy.patch ungoogled-chromium/windows/windows-fix-building-gn.patch ungoogled-chromium/windows/windows-disable-encryption.patch @@ -21,3 +23,4 @@ ungoogled-chromium/windows/windows-fix-licenses-gn-path.patch ungoogled-chromium/windows/windows-fix-building-with-rust.patch ungoogled-chromium/windows/windows-fix-remove-unused-preferences-fields.patch ungoogled-chromium/windows/windows-fix-missing-includes.patch 
+ungoogled-chromium/windows/windows-fix-bindgen-libclang-path.patch diff --git a/patches/ungoogled-chromium/windows/windows-compile-mini-installer.patch b/patches/ungoogled-chromium/windows/windows-compile-mini-installer.patch index fbc1d142..67b05e58 100644 --- a/patches/ungoogled-chromium/windows/windows-compile-mini-installer.patch +++ b/patches/ungoogled-chromium/windows/windows-compile-mini-installer.patch @@ -13,19 +13,21 @@ ] --- a/chrome/tools/build/win/create_installer_archive.py +++ b/chrome/tools/build/win/create_installer_archive.py -@@ -224,12 +224,14 @@ def GenerateDiffPatch(options, orig_file - +@@ -225,11 +225,19 @@ def GetLZMAExec(build_dir): -- executable = '7za' -- if sys.platform == 'win32': -- executable += '.exe' -+ # Taken from ungoogled-chromium's _extraction.py file, modified for Python 2 -+ import winreg -+ sub_key_7zfm = 'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\App Paths\\7zFM.exe' + executable = '7za' ++ if sys.platform != 'win32': ++ return executable # find in PATH + if sys.platform == 'win32': + executable += '.exe' - return os.path.join(build_dir, "..", "..", "third_party", "lzma_sdk", "bin", - "host_platform", executable) ++ # Taken from ungoogled-chromium's _extraction.py file, modified for Python 2 ++ import winreg ++ sub_key_7zfm = 'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\App Paths\\7zFM.exe' ++ + with winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, sub_key_7zfm) as key_handle: + sevenzipfm_dir = winreg.QueryValueEx(key_handle, 'Path')[0] + diff --git a/patches/ungoogled-chromium/windows/windows-disable-rcpy.patch b/patches/ungoogled-chromium/windows/windows-disable-rcpy.patch index 78894523..90e54439 100644 --- a/patches/ungoogled-chromium/windows/windows-disable-rcpy.patch +++ b/patches/ungoogled-chromium/windows/windows-disable-rcpy.patch @@ -1,25 +1,21 @@ -# Disable use of rc.py, which requires Google's rc.exe executable -# This patch uses Microsoft's rc.exe to generate the resource files, and does not do any verifiation +# Disable use of rc.py on Windows, which requires Google's rc.exe +# executable. Instead, use Microsoft's rc.exe to generate the +# resource files. 
--- a/build/toolchain/win/tool_wrapper.py +++ b/build/toolchain/win/tool_wrapper.py -@@ -168,10 +168,12 @@ class WinTool(object): +@@ -168,6 +168,11 @@ class WinTool(object): """Converts .rc files to .res files.""" env = self._GetEnv(arch) args = list(args) -- rcpy_args = args[:] -- rcpy_args[0:1] = [sys.executable, os.path.join(BASE_DIR, 'rc', 'rc.py')] -- rcpy_args.append('/showIncludes') -- return subprocess.call(rcpy_args, env=env) + + if sys.platform == 'win32': + rc_exe_exit_code = subprocess.call(args, shell=True, env=env) + return rc_exe_exit_code -+ else: -+ raise RuntimeError('Must run on Windows.') - - def ExecActionWrapper(self, arch, rspfile, *dirname): - """Runs an action command line from a response file using the environment ++ + rcpy_args = args[:] + rcpy_args[0:1] = [sys.executable, os.path.join(BASE_DIR, 'rc', 'rc.py')] + rcpy_args.append('/showIncludes') --- a/chrome/app/chrome_dll.rc +++ b/chrome/app/chrome_dll.rc @@ -37,7 +37,7 @@ IDR_MAINFRAME ACCELERATORS diff --git a/patches/ungoogled-chromium/windows/windows-disable-win-build-output.patch b/patches/ungoogled-chromium/windows/windows-disable-win-build-output.patch index 98e8cb98..f0fe2181 100644 --- a/patches/ungoogled-chromium/windows/windows-disable-win-build-output.patch +++ b/patches/ungoogled-chromium/windows/windows-disable-win-build-output.patch @@ -4,15 +4,7 @@ --- a/build/toolchain/win/midl.py +++ b/build/toolchain/win/midl.py -@@ -6,6 +6,7 @@ from __future__ import division - - import array - import difflib -+import distutils.file_util - import filecmp - import io - import operator -@@ -444,41 +445,13 @@ def main(arch, gendir, outdir, dynamic_g +@@ -444,40 +444,16 @@ def main(arch, gendir, outdir, dynamic_g if returncode != 0: return returncode @@ -32,7 +24,8 @@ - difflib.unified_diff( - io.open(fromfile).readlines(), - io.open(tofile).readlines(), fromfile, tofile))) -- ++ shutil.copy(os.path.join(midl_output_dir, f), outdir) + - if dynamic_guids: - # |idl_template| can contain one or more occurrences of guids prefixed - # with 'PLACEHOLDER-GUID-'. We first remove the extraneous @@ -51,10 +44,10 @@ - print('To rebaseline:') - print(r' copy /y %s\* %s' % (midl_output_dir, source)) - return 1 -- -+ distutils.file_util.copy_file(os.path.join(midl_output_dir, f), outdir, preserve_times=False) - return 0 ++ # Clean up output temp dir ++ shutil.rmtree(midl_output_dir) + return 0 --- a/build/win/message_compiler.py +++ b/build/win/message_compiler.py diff --git a/patches/ungoogled-chromium/windows/windows-fix-bindgen-libclang-path.patch b/patches/ungoogled-chromium/windows/windows-fix-bindgen-libclang-path.patch new file mode 100644 index 00000000..b3844a86 --- /dev/null +++ b/patches/ungoogled-chromium/windows/windows-fix-bindgen-libclang-path.patch @@ -0,0 +1,26 @@ +In the Linux cross build, libclang is under clang_base_path, not +rust_bindgen_root. 
Without this fix, bindgen breaks thusly: + + thread 'main' panicked at .../registry/src/index.crates.io-6f17d22bba15001f/bindgen-0.70.1/lib.rs:622:27: + Unable to find libclang: "couldn't find any valid shared libraries matching: ['libclang.so', 'libclang-*.so', 'libclang.so.*', 'libclang-*.so.*'], set the `LIBCLANG_PATH` environment variable to a path where one of these files can be found (invalid: [])" + +--- a/build/rust/rust_bindgen.gni ++++ b/build/rust/rust_bindgen.gni +@@ -23,6 +23,7 @@ _libclang_path = rust_bindgen_root + if (host_os == "win") { + _libclang_path += "/bin" + } else { ++ _libclang_path = clang_base_path + _libclang_path += "/lib" + } + +--- a/build/rust/rust_bindgen_generator.gni ++++ b/build/rust/rust_bindgen_generator.gni +@@ -22,6 +22,7 @@ _libclang_path = clang_base_path + if (host_os == "win") { + _libclang_path += "/bin" + } else { ++ _libclang_path = clang_base_path + _libclang_path += "/lib" + } + diff --git a/patches/ungoogled-chromium/windows/windows-fix-building-gn.patch b/patches/ungoogled-chromium/windows/windows-fix-building-gn.patch index d7ac195f..2b3f4d3a 100644 --- a/patches/ungoogled-chromium/windows/windows-fix-building-gn.patch +++ b/patches/ungoogled-chromium/windows/windows-fix-building-gn.patch @@ -1,28 +1,48 @@ # Fix building GN on Windows # Author: shiromichi on GitHub +(also fix a Python syntax warning) + --- a/tools/gn/bootstrap/bootstrap.py +++ b/tools/gn/bootstrap/bootstrap.py -@@ -69,7 +69,7 @@ def main(argv): +@@ -29,6 +29,8 @@ BOOTSTRAP_DIR = os.path.dirname(os.path. + GN_ROOT = os.path.dirname(BOOTSTRAP_DIR) + SRC_ROOT = os.path.dirname(os.path.dirname(GN_ROOT)) + ++GN_EXE = 'gn.exe' if sys.platform == 'win32' else 'gn' ++ + + def main(argv): + parser = optparse.OptionParser(description=sys.modules[__name__].__doc__) +@@ -69,7 +71,7 @@ def main(argv): else: build_rel = os.path.join('out', 'Release') out_dir = os.path.join(SRC_ROOT, build_rel) - gn_path = options.output or os.path.join(out_dir, 'gn') -+ gn_path = options.output or os.path.join(out_dir, 'gn.exe') ++ gn_path = options.output or os.path.join(out_dir, GN_EXE) gn_build_dir = os.path.join(out_dir, 'gn_build') ninja_binary = os.environ.get('NINJA', 'ninja') -@@ -122,11 +122,11 @@ def main(argv): +@@ -102,7 +104,7 @@ def main(argv): + append_to_env('LDFLAGS', [ + '-nodefaultlibs', 'libc++.gn.so', + '-lpthread', '-lc', '-lm', +- '-Wl,-rpath="\$$ORIGIN/."', '-Wl,-rpath-link=.' ++ '-Wl,-rpath=$ORIGIN/.', '-Wl,-rpath-link=.' 
+ ]) + append_to_env('CXXFLAGS', [ + '-nostdinc++', '-isystem../../../buildtools/third_party/libc++', +@@ -122,11 +124,11 @@ def main(argv): shutil.copy2( os.path.join(BOOTSTRAP_DIR, 'last_commit_position.h'), gn_build_dir) - cmd = [ninja_binary, '-C', gn_build_dir, 'gn'] -+ cmd = [ninja_binary, '-C', gn_build_dir, 'gn.exe'] ++ cmd = [ninja_binary, '-C', gn_build_dir, GN_EXE] if options.jobs: cmd += ['-j', str(options.jobs)] subprocess.check_call(cmd) - shutil.copy2(os.path.join(gn_build_dir, 'gn'), gn_path) -+ shutil.copy2(os.path.join(gn_build_dir, 'gn.exe'), gn_path) ++ shutil.copy2(os.path.join(gn_build_dir, GN_EXE), gn_path) if not options.skip_generate_buildfiles: gn_gen_args = options.gn_gen_args or '' diff --git a/patches/ungoogled-chromium/windows/windows-fix-generate-resource-allowed-list.patch b/patches/ungoogled-chromium/windows/windows-fix-generate-resource-allowed-list.patch index 0415e555..5b4cca14 100644 --- a/patches/ungoogled-chromium/windows/windows-fix-generate-resource-allowed-list.patch +++ b/patches/ungoogled-chromium/windows/windows-fix-generate-resource-allowed-list.patch @@ -2,7 +2,17 @@ --- a/tools/resources/generate_resource_allowlist.py +++ b/tools/resources/generate_resource_allowlist.py -@@ -61,7 +61,7 @@ def GetResourceAllowlistPDB(path): +@@ -26,6 +26,9 @@ import ar + llvm_bindir = os.path.join(os.path.dirname(sys.argv[0]), '..', '..', + 'third_party', 'llvm-build', 'Release+Asserts', + 'bin') ++if sys.platform == 'linux': ++ # Use upstream LLVM ++ llvm_bindir = '/opt/llvm/bin' + + + def ExtractAllowlistFromFile(path, resource_ids): +@@ -61,7 +64,7 @@ def GetResourceAllowlistPDB(path): pdbutil = subprocess.Popen( [os.path.join(llvm_bindir, 'llvm-pdbutil'), 'dump', '-publics', path], stdout=subprocess.PIPE) @@ -11,7 +21,7 @@ for line in pdbutil.stdout: line = line.decode('utf8') # Read a line of the form -@@ -75,31 +75,35 @@ def GetResourceAllowlistPDB(path): +@@ -75,31 +78,40 @@ def GetResourceAllowlistPDB(path): # Example: __profd_??$AllowlistedResource@$0BGPH@@ui@@YAXXZ # C++ mangled names are supposed to begin with `?`, so check for that. 
if 'AllowlistedResource' in sym_name and sym_name.startswith('?'): @@ -25,6 +35,11 @@ - stdin=subprocess.PIPE, - stdout=subprocess.PIPE) - stdout, _ = undname.communicate(names.encode('utf8')) ++ if sys.platform == 'win32': ++ undname_exe = 'undname' ++ else: ++ undname_exe = os.path.join(llvm_bindir, 'llvm-undname') ++ resource_ids = set() - for line in stdout.split(b'\n'): - line = line.decode('utf8') @@ -42,7 +57,7 @@ - if exit_code != 0: - raise Exception('llvm-undname exited with exit code %d' % exit_code) + for name in names: -+ undname = subprocess.Popen(['undname', name], ++ undname = subprocess.Popen([undname_exe, name], + stdout=subprocess.PIPE) + found = False + for line in undname.stdout: diff --git a/patches/ungoogled-chromium/windows/windows-fix-licenses-gn-path.patch b/patches/ungoogled-chromium/windows/windows-fix-licenses-gn-path.patch index dc893c20..9f518ecd 100644 --- a/patches/ungoogled-chromium/windows/windows-fix-licenses-gn-path.patch +++ b/patches/ungoogled-chromium/windows/windows-fix-licenses-gn-path.patch @@ -1,8 +1,21 @@ --- a/tools/licenses/licenses.py +++ b/tools/licenses/licenses.py -@@ -811,7 +811,7 @@ def _GnBinary(): +@@ -808,10 +808,20 @@ def _GnBinary(): + exe = 'gn' + if sys.platform.startswith('linux'): + subdir = 'linux64' ++ if os.path.exists(exe): ++ return './' + exe ++ if 'GN' in os.environ: ++ return os.environ['GN'] ++ return exe # find in PATH elif sys.platform == 'darwin': subdir = 'mac' ++ if os.path.exists(exe): ++ return './' + exe ++ if 'GN' in os.environ: ++ return os.environ['GN'] ++ return exe # find in PATH elif sys.platform == 'win32': - subdir, exe = 'win', 'gn.exe' + subdir, exe = os.path.join('..', 'out', 'Release', 'gn_build'), 'gn.exe' diff --git a/patches/ungoogled-chromium/windows/windows-run-midl-via-wine.patch b/patches/ungoogled-chromium/windows/windows-run-midl-via-wine.patch new file mode 100644 index 00000000..500c8598 --- /dev/null +++ b/patches/ungoogled-chromium/windows/windows-run-midl-via-wine.patch @@ -0,0 +1,93 @@ +When cross-compiling on Linux, run MIDL via Wine. Unless we use the +pre-generated files provided by Google, we have no better alternative. + +Note that having MIDL invoke the Linux host's clang-cl via /cpp_cmd does +not work. The latter cannot handle paths with backslashes, and even if it +succeeds (e.g. using a wrapper script), MIDL thinks the invocation failed +for some reason. So we rely on CL to handle the preprocessing instead. + +--- a/build/toolchain/win/midl.py ++++ b/build/toolchain/win/midl.py +@@ -254,9 +254,39 @@ def run_midl(args, env_dict): + midl_output_dir = tempfile.mkdtemp() + delete_midl_output_dir = True + ++ midl_output_dir_native = midl_output_dir ++ if sys.platform == 'linux': ++ # Prepare to run MIDL via Wine ++ import glob ++ sdk_dir = os.environ['WINDOWSSDKDIR'] ++ midl_exe = glob.glob(f'{sdk_dir}/bin/*/x64/midl.exe')[0] ++ assert args[0] == 'midl' ++ args = ['wine', midl_exe] + args[1:] ++ # MIDL can't seem to invoke the Linux clang-cl correctly, so... ++ sysroot_dir = os.path.normpath(f'{sdk_dir}/../..') ++ cl_exe = glob.glob(f'{sysroot_dir}/VC/Tools/MSVC/*/bin/HostX64/x64/cl.exe')[0] ++ x = args.index('/cpp_cmd') ++ args[x + 1] = cl_exe ++ assert args[x + 2] == '/cpp_opt' ++ args[x + 3] = args[x + 3].replace('-Wno-nonportable-include-path', '') ++ # MIDL bugs out if it sees an absolute POSIX path with forward slashes ++ # in $INCLUDE (e.g. "/some/random/dir"), because it will interpret that ++ # as an option flag. 
It will happily accept the path if prefixed with a ++ # drive letter, however. Use "z:", as Wine maps that to "/" by default. ++ incs = env_dict['INCLUDE'] ++ incs_new = ';'.join(['z:' + dir for dir in incs.split(';')]) ++ env_dict['INCLUDE'] = incs_new ++ # Same issue here ++ midl_output_dir_native = 'z:' + midl_output_dir ++ # Set some Wine env vars to reduce noise ++ env_dict['WINEDEBUG'] = '-all' ++ env_dict['WINEDLLOVERRIDES'] = 'explorer.exe=' ++ # Quash "Fontconfig error: No writable cache directories" messages ++ env_dict['XDG_CACHE_HOME'] = '/tmp' ++ + try: +- popen = subprocess.Popen(args + ['/out', midl_output_dir], +- shell=True, ++ popen = subprocess.Popen(args + ['/out', midl_output_dir_native], ++ shell=(sys.platform == 'win32'), + universal_newlines=True, + env=env_dict, + stdout=subprocess.PIPE, +@@ -328,9 +358,6 @@ def main(arch, gendir, outdir, dynamic_g + source_exists = True + if not os.path.isdir(source): + source_exists = False +- if sys.platform != 'win32': +- print('Directory %s needs to be populated from Windows first' % source) +- return 1 + + # This is a brand new IDL file that does not have outputs under + # third_party\win_build_output\midl. We create an empty directory for now. +@@ -360,9 +387,6 @@ def main(arch, gendir, outdir, dynamic_g + file_path = os.path.join(source, source_file) + if not os.path.isfile(file_path): + source_exists = False +- if sys.platform != 'win32': +- print('File %s needs to be generated from Windows first' % file_path) +- return 1 + + # Either this is a brand new IDL file that does not have outputs under + # third_party\win_build_output\midl or the file is (unexpectedly) missing. +@@ -371,6 +395,9 @@ def main(arch, gendir, outdir, dynamic_g + # instruct the developer to copy that file under + # third_party\win_build_output\midl. + open(file_path, 'wb').close() ++ elif os.path.getsize(file_path) == 0: ++ # Regenerate empty files ++ source_exists = False + shutil.copy(file_path, outdir) + + if dynamic_guids != 'none': +@@ -401,10 +428,6 @@ def main(arch, gendir, outdir, dynamic_g + else: + dynamic_guids = None + +- # On non-Windows, that's all we can do. 
+- if sys.platform != 'win32': +- return 0 +- + idl_template = None + if dynamic_guids: + idl_template = idl diff --git a/patches/ungoogled-chromium/windows/windows-use-system-tools.patch b/patches/ungoogled-chromium/windows/windows-use-system-tools.patch new file mode 100644 index 00000000..f8529418 --- /dev/null +++ b/patches/ungoogled-chromium/windows/windows-use-system-tools.patch @@ -0,0 +1,83 @@ +System tools to use (instead of Google-provided binaries) when +cross-compiling on Linux or macOS + +--- a/build/toolchain/win/midl.gni ++++ b/build/toolchain/win/midl.gni +@@ -161,6 +161,14 @@ template("midl") { + assert(false, "Need environment for this arch") + } + ++ if (host_os == "win") { ++ clang = rebase_path("//third_party/llvm-build/Release+Asserts/bin/clang-cl.exe", ++ root_build_dir) ++ } else { ++ import("//build/config/clang/clang.gni") ++ clang = "$clang_base_path/bin/clang-cl" ++ } ++ + args = [ + win_tool_arch, + generated_dir, +@@ -171,8 +179,7 @@ template("midl") { + dlldata_file, + interface_identifier_file, + proxy_file, +- rebase_path("//third_party/llvm-build/Release+Asserts/bin/clang-cl.exe", +- root_build_dir), ++ clang, + "{{source}}", + "/char", + "signed", +--- a/build/toolchain/win/rc/rc.py ++++ b/build/toolchain/win/rc/rc.py +@@ -125,6 +125,9 @@ def Preprocess(rc_file_data, flags): + """Runs the input file through the preprocessor.""" + clang = os.path.join(SRC_DIR, 'third_party', 'llvm-build', + 'Release+Asserts', 'bin', 'clang-cl') ++ if sys.platform == 'linux': ++ # Use upstream clang-cl ++ clang = '/opt/llvm/bin/clang-cl' + # Let preprocessor write to a temp file so that it doesn't interfere + # with /showIncludes output on stdout. + if sys.platform == 'win32': +@@ -164,6 +167,7 @@ def Preprocess(rc_file_data, flags): + def RunRc(preprocessed_output, is_utf8, flags): + if sys.platform.startswith('linux'): + rc = os.path.join(THIS_DIR, 'linux64', 'rc') ++ rc = 'rc' # find in PATH + elif sys.platform == 'darwin': + rc = os.path.join(THIS_DIR, 'mac', 'rc') + elif sys.platform == 'win32': +--- a/chrome/credential_provider/build/make_setup.py ++++ b/chrome/credential_provider/build/make_setup.py +@@ -59,6 +59,8 @@ def GetLZMAExec(src_path): + The executable command to run the 7zip compressor. + """ + executable = '7zr' ++ if sys.platform != 'win32': ++ return executable # find in PATH + if sys.platform == 'win32': + executable += '.exe' + +--- a/third_party/node/node.py ++++ b/third_party/node/node.py +@@ -12,6 +12,8 @@ import os + + + def GetBinaryPath(): ++ if sys.platform != 'win32': ++ return 'node' # find in PATH + if platform.machine() == 'arm64': + darwin_path = 'mac_arm64' + darwin_name = 'node-darwin-arm64' +--- a/third_party/node/node.gni ++++ b/third_party/node/node.gni +@@ -23,7 +23,7 @@ template("node") { + # When use_remoteexec=true, node actions may run on remote + # Linux worker. So it should include linux node binary in inputs. + if (is_linux || is_chromeos || use_remoteexec) { +- inputs += [ "//third_party/node/linux/node-linux-x64/bin/node" ] ++ inputs += [ "/usr/bin/node" ] + } + if (is_win && host_os == "win") { + inputs += [ "//third_party/node/win/node.exe" ] diff --git a/skunk-tmp/ghci-strategy.sh b/skunk-tmp/ghci-strategy.sh new file mode 100755 index 00000000..03eb23f6 --- /dev/null +++ b/skunk-tmp/ghci-strategy.sh @@ -0,0 +1,229 @@ +#!/bin/sh +# ghci-strategy.sh +# +# This script facilitates building Chromium within the limitations of the +# GitHub CI environment. +# +# Usage: ghci-strategy.sh N_SPLIT TARGET ... 
+# +# Example: ghci-strategy.sh 8 chrome chromedriver mini_installer +# +# The GitHub CI environment imposes two notable constraints: +# +# 1. A single job cannot run longer than six hours, and +# +# 2. While multiple jobs can run concurrently, no network communication is +# permitted between them. +# +# A single GitHub runner cannot complete the u-c build due to #1, and a +# distributed-compilation solution like distcc cannot be used due to #2. +# An alternative solution, then, is to carve up the u-c build into multiple +# chunks that can be built independently. +# +# The build strategy implemented here divides the build into three stages: +# +# Stage 1: Produce most of the generated source code. +# +# Stage 2: Build most of the target-platform object files (.o, .obj). +# This is naturally the most CPU-intensive stage. +# +# Stage 3: Build the remaining objects, and link everything together +# +# Stages 1 and 3 run on a single system, but stage 2 can be divided across +# several (see the "split" variable below). Each stage inherits the build +# outputs of the preceding ones. +# +# This script generates several files, the most notable of which are +# +# ghci-stage1.ninja: Stage 1 build (target "ghci-stage1"). +# +# ghci-stage2.ninja: Stage 2 builds (target "part1", "part2", ...) +# +# Stage 3 has no special files/targets; just use the normal Ninja +# invocation with your original intended build targets. +# + +if [ $# -lt 2 ] +then + echo "usage: $0 N_SPLIT TARGET ..." + echo '(see script comment header for details)' + exit 1 +fi + +n_split="$1" +shift +targets="$*" + +export LC_COLLATE=C +unset NINJA_STATUS + +make_ninja_file() +{ + local target="$1" + local dep_list_file="$2" + + (echo 'subninja build.ninja' + echo + echo "build $target: phony \$" + awk '{ print " " $0 " $" }' $dep_list_file + echo) +} + +# A "heavyweight" target is one that has nearly everything else as a +# dependency. We need to keep these out of stages 1 and 2 or else the +# intended division of labor will fail. 
+check_for_heavyweight_dep() +{ + local dep_list_file="$1" + + make_ninja_file \ + ghci-check \ + $dep_list_file \ + > ghci-check.tmp.ninja + + ninja -f ghci-check.tmp.ninja -t inputs ghci-check > ghci-check.tmp.txt + + if grep -Fqx \ + -e chrome.dll.lib \ + -e resources.pak \ + -e gen/chrome/chrome_resource_allowlist.txt \ + ghci-check.tmp.txt + then + echo "Error: $dep_list_file contains dependency on heavyweight target" + exit 1 + fi +} + +######## + +echo "Build targets: $targets" + +ninja -t inputs $targets > ghci-inputs.tmp.txt + +cat ghci-inputs.tmp.txt \ +| grep -E '\.inputdeps(\.stamp)?$' \ +| grep -v '/chrome/chrome_initial\.inputdeps' \ +> ghci-inputdeps.txt + +echo "$(wc -l < ghci-inputdeps.txt) inputdeps targets to build in stage 1" +test -s ghci-inputdeps.txt || exit + +check_for_heavyweight_dep ghci-inputdeps.txt + +######## + +cat ghci-inputs.tmp.txt \ +| grep -E '^obj/\S+\.o(bj)?$' \ +| grep -v '/chrome/chrome_initial/' \ +| grep -v '/chrome/packed_resources_integrity/' \ +> ghci-objects.txt + +echo "$(wc -l < ghci-objects.txt) target-platform objects to compile in stages 1 and 2" +test -s ghci-objects.txt || exit + +check_for_heavyweight_dep ghci-objects.txt + +# note: re-using file from above check +cat ghci-check.tmp.txt \ +| grep -e '^phony/' -e '\.stamp$' \ +| grep -Ev '\.inputdeps(\.stamp)?$' \ +> ghci-objdeps.txt + +echo "$(wc -l < ghci-objdeps.txt) additional object dependencies to build in stage 1" + +# enable_precompiled_headers=true +(grep '/precompile\.cc\.obj$' ghci-objects.txt + grep '/precompile\.h-cc\.gch$' ghci-inputs.tmp.txt +) > ghci-pchdeps.txt + +test ! -s ghci-pchdeps.txt \ +|| echo "$(wc -l < ghci-pchdeps.txt) precompiled header objects to compile in stage 1" + +######## + +sort \ + ghci-inputdeps.txt \ + ghci-objdeps.txt \ + ghci-pchdeps.txt \ +> ghci-stage1.tmp.txt + +make_ninja_file ghci-stage1 ghci-stage1.tmp.txt > ghci-stage1.ninja + +ninja -f ghci-stage1.ninja -n ghci-stage1 > ghci-stage1-steps.tmp.txt +test -s ghci-stage1-steps.tmp.txt || exit +steps=$(wc -l < ghci-stage1-steps.tmp.txt) +objs_all=$(grep -Ec '\.o(bj)?$' ghci-stage1-steps.tmp.txt) +objs_tgt=$(grep -Ec ' obj/\S+\.o(bj)?$' ghci-stage1-steps.tmp.txt) + +if [ $objs_tgt -eq $objs_all ] +then + echo "$steps build steps in stage 1, including $objs_tgt target-platform objects" +else + echo "$steps build steps in stage 1, inc. $objs_all objects ($objs_tgt for target platform)" +fi + +# Sanity check +if [ $steps -lt 5000 -o $steps -gt 30000 -o $objs_tgt -gt 8000 ] +then + echo 'Error: outside of expected range' + exit 1 +fi + +######## + +# Remove from stage 2 the objects that are built in stage 1 + +ninja -f ghci-stage1.ninja -t inputs ghci-stage1 \ +| grep -E '^obj/.+\.o(bj)?$' \ +> ghci-objects-stage1.tmp.txt + +# Note: The size of this list does not match the $objs_tgt count that we +# obtained above, partly because not every object filename is printed in +# the Ninja log output. 
+ +comm -23 \ + ghci-objects.txt \ + ghci-objects-stage1.tmp.txt \ +> ghci-objects-stage2.tmp.txt + +echo "$(wc -l < ghci-objects-stage2.tmp.txt) target-platform objects to compile in stage 2" + +######## + +echo "Splitting up stage 2 build into $n_split parts" + +split -a 1 -n r/$n_split \ + --numeric-suffixes=1 \ + --additional-suffix=.txt \ + ghci-objects-stage2.tmp.txt \ + ghci-objects-part + +ninja -t compdb > ghci-compdb.tmp.json + +$(dirname $0)/ninja-compdb-extract.py \ + ghci-compdb.tmp.json \ + ghci-objects-part*.txt \ +> ghci-stage2.ninja + +######## + +cat << END +Generated ghci-stage1.ninja, use target "ghci-stage1" +Generated ghci-stage2.ninja, use targets "part1", "part2", ... "part$n_split" +END + +if ! grep -q '^ command = test -s ' toolchain.ninja +then + perl -pi \ + -e 'if (/^rule /) { $a = / (cc|cxx)$/; }' \ + -e 'if ($a) { s/^( command) =/$1 = test -s \${out} && touch -c \${out} ||/; }' \ + toolchain.ninja + + echo 'Modified "cc" and "cxx" rules in toolchain.ninja' +fi + +rm ghci-*.tmp.json ghci-*.tmp.ninja ghci-*.tmp.txt + +echo 'GHCI strategy complete.' + +# end ghci-strategy.sh diff --git a/skunk-tmp/ninja-compdb-extract.py b/skunk-tmp/ninja-compdb-extract.py new file mode 100755 index 00000000..5b4a12a8 --- /dev/null +++ b/skunk-tmp/ninja-compdb-extract.py @@ -0,0 +1,64 @@ +#!/usr/bin/env python3 +# ninja-compdb-extract.py +# +# Helper script for ghci-strategy.sh. It extracts commands for specific +# outputs from a compilation database file generated by "ninja -t compdb", +# and writes them to a new, standalone Ninja file on stdout. +# +# Usage: ninja-compdb-extract.py \ +# COMPDB OUTPUT-LIST-PART-1 OUTPUT-LIST-PART-2 ... +# + +import json +import sys + +compdb_file = sys.argv[1] + +compdb = {} + +with open(compdb_file, 'r', encoding='iso8859-1') as f: + compdb_json = json.load(f) + for rec in compdb_json: + command = rec['command'] + output = rec['output'] + compdb[output] = command + +def ninja_escape(s): + return s.replace('$', '$$').replace(':', '$:') + +ninja_buf = [] +target_output_list = [] +part_num = 1 +rule_num = 0 + +for output_list_file in sys.argv[2:]: + target = f'part{part_num}' + target_output_list.append({'target': target, 'output_list': []}) + with open(output_list_file, 'r') as f: + for output in f: + output = output.strip() + target_output_list[-1]['output_list'].append(output) + command = compdb.get(output) + if command: + command_esc = ninja_escape(command) + deps = '\n deps = msvc' \ + if ' /showIncludes ' in command else '' + ninja_buf.append(f'rule r{rule_num:05}\n command = {command_esc}\n description = COMP {output}' + deps) + ninja_buf.append(f'build {output}: r{rule_num:05}\n') + else: + print(f'warning: no command for {output}', file=sys.stderr) + rule_num += 1 + part_num += 1 + +print('# Generated by ninja-compdb-extract.py') +print('#') +print('# Targets: ' + ' '.join([d['target'] for d in target_output_list])) +print('#\n') +print('\n'.join(ninja_buf)) +for d in target_output_list: + target = d['target'] + output_list = d['output_list'] + print(f'build {target}: ' + ' $\n '.join(['phony'] + output_list) + '\n') +print('# EOF') + +# end ninja-compdb-extract.py
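
For reference, here is a minimal sketch (not part of the patch) of the stanzas that ninja-compdb-extract.py writes for a single "ninja -t compdb" record. The compiler command, source file, and output path below are invented for illustration; the real script emits one numbered rule per extracted object and defines the phony "partN" targets after all of the rule/build pairs.

#!/usr/bin/env python3
# Illustrative sketch only: reproduces the output format of
# ninja-compdb-extract.py for one hypothetical compdb record.
import json

# One record as produced by "ninja -t compdb" (values invented for the example)
record = json.loads("""{
  "directory": "out/Default",
  "command": "clang-cl /showIncludes /c ../../base/example.cc /Foobj/base/example.obj",
  "file": "../../base/example.cc",
  "output": "obj/base/example.obj"
}""")

def ninja_escape(s):
    # Same escaping as the helper script: '$' and ':' are special to Ninja
    return s.replace('$', '$$').replace(':', '$:')

rule = 'r00000'
# "deps = msvc" tells Ninja to parse /showIncludes output for header dependencies
deps = '\n deps = msvc' if ' /showIncludes ' in record['command'] else ''
print(f"rule {rule}\n command = {ninja_escape(record['command'])}"
      f"\n description = COMP {record['output']}{deps}")
print(f"build {record['output']}: {rule}\n")
# Each stage-2 part is a phony alias over its slice of the object list
print(f"build part1: phony {record['output']}")

Because "ninja -t compdb" expands all variables, each extracted command is self-contained; this is why the script emits a private rule per object instead of trying to reuse the original cc/cxx rules from build.ninja.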